[Binary payload removed: POSIX ustar tar archive of the Zuul job output directory `var/home/core/zuul-output/`, containing the subdirectory `logs/` and the gzip-compressed member `logs/kubelet.log.gz` (gzip header original filename: `kubelet.log`). The compressed log contents are binary data and are not recoverable as text.]
vؿJNZ]P/MǖԥgkW NN_LjrwY\]z?f\7IMTZg vN{(hhr2ҟ@ |ҫﮋ: A+M Z^9l%)a FQSTAQf^NWĥTL$1&(kW˙DU s,P^A9FhEk{_7ïLqvY7A87jy _:/DW|@0)@8bQ3x)4.OŢfSV|Qn 6E׾M:qȚb v$8b Li:a:A$wC4& pɃ0Y":tDU*TN8S%s*g:d[냏_AZ%XOYS,^wm$Kyڕ6~ivFGG:Z3/ʫYH(0``Hhmc*I""Ȍl\ g+֐Ȝ ~ 6?Y>v tף_s l]?бC|m\/3{R鵽Axcf $c ꅡ}Z^l,<9ͦTA,w]3PF@^kjE4[Qޏ 0]2.g-Ӛ `1$1;c:GT:*;՝tpXrXxSMkf; ]Co#e ٰE 6Jb(C"`8fQT uD{gA8g8QWS#Uˍ_[@T䝶~nyLU6F)X;MLq'[ <pF%R 69k,sugՎUO"2 C!,*5!b/iK1cB ʖ\Lp/"dXL?G#VDL\ }p,Lv  1pK/@[#M0Clg$ ]~G%Q,j!+nV8h{] L%՗̏7;QG:^Y|{ ~3L_l7bz,w@TMRQJ3=>=ȉJA x9i~לY^ZgXɨ $IqWLR<1L$8Y-eWΖueq>H#2r|<,8i;麝*rhMfWB6]^g1^whskh{qUahvChrnw _e<絖a:oyu{>Dڀ0϶=&mjJ_BWeC+v0%~g{0 s+0bF"cCBMSظY)? ">uraV.C;ԮRcn5Mos}C*blq2ABO%jbhJ%!9Byd&(}HJU!չ}YMDwb/8M*j^rF)7NF#1OGh4˾v;+ٛ7wMga0v ci_p|=ܳ*?nroi>86ht8[}l|iuΪ=mz3ȕS8pkr{iA!er v~Oh@T]ww,f5Vd]-î\ٰQ\(^M͉CJL;y*'ԇҰy .tmp0^O^nk zh[pA߻R#\㎁i@ ,B;}oq~l]wT5̶EjX teΠM8xsfGP6O#r -첋NY6Yg!JE_-4u٨.P(;3B 'F?]s/' aD5J%2fHY*{'M#α@.jk#ޟB}T_De!c̄0*hX& V"Z9(`R(52bdIHJH}6^+s@"ΖCAg!,/譳_0sδe6*LFePv^* z体cEGAG C`/VfrH9+5C(N2#0`ZmM\%TFJH# Q0G=K%>LBr7g)w# K_ڋ̖ z> mv, q`yԧ2evY$K"&yw&%s9g2FɱJ'dLZ\DZԂ/A("FƭFxI=)ZKfǶg~;eqSGzf2Y~gG\Q#+EPnO؈2,nOWR2Z+| <'Ln]ew]#,칻 R*Q'tW)yQs:] qWY\n]eiyUR>5!w )wVUV+RoWwiܕ\RzC* |rץ.l䮲`rm +͵ҷ@`o]eqͬ]>RҺ)ݕ\g_/cq/ٕl:@x7lfӗEVow9.姨)d3' (pXcNXMՕWR%yX-5D tp[l8rFa)-nzKjI`$po-OUp4%CDD2H)A3p <<"7:P,;Ŀ-Xݫ.O0WQR!0A3\ Q Zԯ?a /s(GpRbbŨ`+}`&ߘ hx_IC`p9z6ge ٰpv4gSYM|N u!=A$rD~:|j<E,R؃a󨐕"<78 bN?_ZE3׳7 KZtq(%9N:TqaY `G-W[C,Y*Ts!l$~t?Gܧ&Mv]rNr̈K>F'ɐ&#ƒ(ǢS˄P˘Ӊ _.O`@Ld=fjE W2t(rMs:̪zo3WQ]_'QJ*Rib_6|6y^vyKj^Y|Ej۠o0͙eʷc`3N[;$jy|}wlst;+eچg)st*bOg&fm9WYn]qd_v|r:npŇu,TZ%IE_Ǣvsh}=R.jIO`&ԉ`pC`R:*wm$;r? ,qY,,eE?)R S=|EZzxf83Ss.'FlB%%bqEp(g3<<Ra6Juƣ f8>P7M'oNz=-P}z= zAg+)z{Cg\TVΘEi{o!ұis0]$`Z!)~ z>* )lf1|yֺK Zi:@ɰS"mrFy$!Bu,Ibkߞ+D[^Y풤/كu6*;ٗFe4Ce٩}xs+#/Vyr:.<ʑsADddD Ʌ4[]xze{IQ| %)Jo:y>2x (1L쟥)yB:8\u/2[л.K ׋lSCRJ%a})ZP`?F, RP+[jɳaez) iI_"rV)PVDh͆s߉z u 1* EphY5?D:ʬ].DQ jIht18@KG>N#r}@JYv㓎Zk j jxIO(i/1B*&eX ۇL?ZHބ\!%i`8Z4L&n|?>MS-z}&9iTsw )/W4,!lخgU'b_v@duWM 6J@E 0:lsQ!9Ш`H@ٜBnzŃܖ}9MW+~D"!)"zH(ԧ=Ps)i04[GcL8eH*%tt>A .8!̤c^Ep\mS 57;?=1\}P88u.6hUХR1*2#t QqE,cHߒuХ!-4I^/sR<llJIP< 6X,Nbp^XpiB辚:<*  `R%u%AeZJ "I\KcIH c$K3 u*k-G;v/)l4gՠHm1O*h(Q%%+aAL.E!H2 VNSsdӽ&ǹ3nYF(]Va|~󼆳.&̍i\>2iP >vvs.[[9]>>O}\^jZJJ~7 <@7}NuN[mCop}yT?55qͭMR$z>NRoJ>?w8/[ʪ̠Z:hNf{wew!}U4 I]<"_ދh]W~#58}L:V5oFH-ٕ8;YY2o )g4jCAXQKQ$@L?Y+=\T׋Z& H|?e/ӐYfC/lsz?ۖwb}} pBW:gPvɮ=43 bCT"&k懪'_ڽtQhbE5{U Of_d6C2X Q`20fʀ1T5B>:V*m) H2Jc7^[K͆s?8mN({{Ko|{>C{Tn'8@ׂ>^@Uo_u* ',7@`Wx2)y˟ ??n6`k}?NY?(ypyng:%ITb-X/##WEJl{=p6xAϛwK  }j oĮ+RI KCW.8CWfoT.IKO˵OT[nt'fvЕu >Cz̻.u!/}xwirBn6/W-0TJXtg ])w&FŌJ䀡'UÉq$:ٮF6^vc=ftkq<=kC5WX7+&-Q BS.:r& "u2 6|0lWU$c\T<٧ȱ^ZR&GڐDt*R1 TLFٶ^BrY)OdL7^st:W;kuiRhKܸO]fb& `ͭ$htC/v';&]nF ~nȆF.(Om{aUyevzF[[\~<k9|ѥqiJ_-D}{n(4 4?,svl:Db{Boh}5qMIXgLӍO5VF'\t{WUn(Q 2d0Nd**s F q#@Fs1W\椕kw`bg;hǎ;h-`)B% >1M_*2up],)YP%i2JPDvFhgNoHh~^4ͬc ` ^ 88u3c[1^= ^sU9_ Py x &E&8IB(΋(a3@=|K-YͶDg A2 M #h[t*:s@e09J.+[Q"MO!+mͲL'-cVIYk ¡䊾Bj٣}гKiI3|CIYh?4S;)2[ik&xR\_[Ku*'[}DE).''m̋juHN) \rm!bxO'͎,Ij̏|Rɓ B}RC(#rrrC#G!EKIt묱U̱$ )2NQ 2ZLB"k&%4>궲2U[Ξ|_ωj}X3g<+hSnR$Fle%Bm8 xhYJ]y͠c,{CN6z]Z`D0/˙iR 0W jUZa:NZ2y7Ԍ 2dg1Ym#}DH]jpO!2Ӽҝ"8&iz{s6M3jAM3=/oEx7GM'fX֣#d27Ʀ`V#-i'].DqhFcnlj~|-Φb59Te[ ǑQ'CbLh9QHKb QʂuQ=`z4WCCW#:Ao'E7\4e׏{ n=iD8Y$$t&'J6櫞l*`i~J (Md5LcjHx0]0JP].ds (ctsvت9\1u}N ؖ{|ܬ.NxnڔD+&!q$Jwaii7BmRBy -gX* ,JqJzc8C b")t-; V[Ξut~NfGW7<-)g9!Rъ,&Dk,3 g{{.?U0ͪ 5h08/f=WfʐKH$I22=$ 3#[jI+! 'ЩRM{u`32O"Ex XiC,Gωz!BZ*fPtT}rةFDH0J 8 ! 
$#$C JӚSh\*>ϥƙ 2X*?~wV8)OIfҿg׃_Mat4+?etia%8sD´S cKo -O7%jV<9ox֌孍$?݌Z_I^')VWZ kg| eR?o8so ɂ.*M{nTiHz&ޅ/eP@^~-XZ?ɒHq'NJ(u VGOq BDM&/&S)i`O,9EƒwO1՚^.@U>ۧ͟”i8m?fEY[y ܸV.#Mm>./t0)Ǩ95ͻe1[Q!7gJ^$Sj@rhA 煾 DŽ6atzt}R\,TlrSYVUVhNu[9U$uEjYEA@͝Q!dOĘ-w"DGjPWV_lk>Ìi{wB \b2ֹA1є" 'x`mdv*ԳJFI!^7&K.uDt>ZIwWC~,O΀Ɂf:h@i#}E,61c tWt%Ef7Tg%Vvu[Nq#/uz&! 2ϩPJ}Ps4h;A.zwNfrXozgpt7_ڟr"k Gw?]t6ta1 \UrDĻAelIP+Plv]Qk*RRioWi?oLAf`Ҍ'L0D!o1F{/LșT9aPT#UO`Rhw?q?G|iV~o|ON1&0xmJ+.I:-W*qY39rDRxc|?4z/+3S{r]x>~,Do^j e:iR p#|Ɓ[ȑ9.=јC+J,1E!l$_J-$oh;Gd&TW['mw Ri\NYkF'!/h;[K{C`Ƿ 7!]\1>S& ~z'1U1F &ˬWR$җS&o47>8i]UhGJ, ѓB)֏1[/KL90E ZLBdݒq;J9,FB, u' * +*6-. fۆ»3&04n0 4%9'! õ#fDX>u!02,H6 uU]*d )ؔQ؈=fl&e!DM.[b0gR:XYjc';lf",M@qdd*w刴d"7mJI ^`ƺ硳t"E2!CȊ2!ɚX @1m|:R7]rv֨}MqϦ^-8T"t[OjÑϤwў~ JFF*%,J.?W\ 3n?Td'$HEQGLdICej- s(W;eV\)dVhBʧ| $[>kr Mzw9K{<^hS腢HZHמcgl"Ō=$֓.je\=eK?*7r~V.>`W{39Lގ/RjYǣKGػݎK\ GFvoT%>K0Mf,Zp&Εm}ץk3bO%$}"[O&ojkGs!$r}Eύ7w+[5ήgzmt~ܧ:qtvOnyEx0Q=gMez[lCr/ m-.+ȭ-%i)vo+ գq뻯,A$扡`;?%+cC2W2c| qU){OUٛ ^-g89&br`Lk%BX,2(^59 F$qg3 )Vgsx>q4STw5Nѓs^7mc~K. l."Er)rq:| vɕuCs^D@\̐`䲈 *3nw3"1 8FN`>+Y+wWDydkQjӓa+G fΗ5ivK[L@ sz_LyʏcarmUË݊mŴi%*{ &!reb0a0"f+ȆՌF0l./NL+Tz٫ƴSMBiJ fgd YiMw^v':喖ۍEd^;'J91jE規WE[Wfntؙsfp"u\?3# SOav=TM:2=-Yf]YwBɺQ4+:ݻz~yP> 73J 'F?]2{B ,L1Y3\Fͅ;.bN}6^QgcƆFQh`ͧT(^.l꽯uRPr_^T#ڣP~.GuHXG\踆dZ㽙H;i iTfɜ3^oDzɾlQ՛$jIvө{2vCN$LR)6RA?qpb";Җ.1-A)o%"1Dzdqޞ"[|jof>y0ܶl_FP0*](EPE*Z)c;!B:![KEBD3*ثN.OBvC6 5{nB-:RЅpH%+wqC|O\5A5HGF%m3% g.fRV\4fŐHO:yĊu {a c |4DcYE,aR;D3Q7:\+&V.}0pn] ~96u$%b5qn2qAd@| HY3[V*w# ]~pzDgL;K6sTW9"ﭦn}WIg!ބ9kQ$iJF3 )R(SZx1X]VT[NLQdFgʧJx2cO`赦&T^&fMx#Bh8u^TӓS ^[^mW)?-^ys8c&riփ\3.F2#)w% hK{Zj )cbI#uRPNQ1%c\F'U3Vgg\R iƮPU½o|ڐwPW$OOnY~sՃ?m@`0kp5ǜG!JQCtn)˔Cdh4^#P1I&#20EN\6AW5Qg M`U =6kcm .⵫iǮ^[Wںy`i G%V2Ϋ$(Ƶ`\bp}U3M@KB)Tڅ#dD 0"i51Ѡx&(1RT'\eXMp4XM?vG4#6N4L;hU!k9RYQ1 V0JlVgP` ɄtYqsA(+{jlQ~}a1>uVӒ"~6b]_](6p>My&^wIXKIVY^-H}JsmwOsLJ/pHE.fL~e4xn? W~O`{!Z(O\s{J;Kez>{d \̧xbgqAw,4uo]bT2+=;M\"{B9eNgT< ? o3<\s7t1=4)l̟G;AΞM0~at;:oxgp#Wq=öK['gg?'QFGˣzǗkƔs6_ҎV̈pH</ߖvҎRzT6zQ6R4y**zLA ō Պtt( u D[6ySo &A'}`f7tw]oYȵMȝ+qfk3bO_e^1Kdں"vT+GƱvq nx+h8\fNr̳|Ca};Gsw]A6gMw-6{hF'-0N"xDc0_}(%_}=e "1O U#w'()Y[Z9ŝBm/Uv 9Ljۈ/ia͛VKTt8LUC7|2a6_qoXl]9fw:Scgbϙ]=qYV V 8[0*=umvXRS5x8p?dٙufݩ &"D t׮k~tyCg[@0(j8x+-TXvJ -<3L0@Ggs5&P#p:8xEQD2!c4Rnz񺰩{KBɾzR폔j0So\D!bm#w?/siՇfCd"qSX%s֯μ_^&EQ奣D/;ʲzD-n:bOnHԉ$)1V*T*F'CL\dǕWR%:(D$UޒU,SdO Gٓw m\k}eR^$Eњ2Ƹ›,ËKT-.PaA4b N!"y.n%8W'*݂#]jN" lL!"RHHﲏ/RZ[!OXH $y+"ݓ+D+6S"pVb&%1pak tQu,𣏕^5ҫmLW]Z}0d* :ҊA[pHxsSH HՆr 1'tTl$:+V &茨I:`Y4xˣ3nl:NBR2.tAX"Reʐk5Ƅ̽õTd =4Y}*^-vWމ12&eq X?Vu_S|4y$%` _Kǻ}}`Lj)Pƹhge[OZ]qdLZ\1yΓw\0yxzżAB>wd*ĵۃl>U#M=_,X_M$?{WFr@Fۀ[,$k,SBvS=z? W P2{+7SoM.ї N.|n.Y^֙QeQ=J3jp}+QE2'x:)uTL'PP]{A%}6#6U)+` o FP#_q@VNx-|D[B&\0/D2DŽ ` hمl L)R 8IKu|Jgl7=d) =l>Kkq\w h(T7@D DCY1O'ifU1!HxfhY"%^r; -O\{9{R EbgvIbJH$F;/;3/פB'$3r(($ (,oNhQcAbs&2oNw,gr1]~6Qk/g]D@ oѲEr5HFڜX)[KQ h設%{R|/?W;dEK1I~~69o\Ak=c=ZjutI@mTlRg/Rv%DTCl0DqUx+P/!v61@&ZIXt,Z [6X {Rաd^$2֝Giw50].y8l!g79x1+I#*6=ebZej_)cxᐺE6V"sJx@"oeLƔqzHdIN$wEΎ| <>MZ^nZNmf,IJ>~:6ekq[;g`vrf-E[5ǹz.lMF%3k?1Tû_JhnWi$~8m旑O̳eV^nhUxn#LX;5y/߆:5wZF?7_.\Yfe3zc b߭pˀ=83G+ Ԍ՚^Yk%jV.=mm U dc ڔ^EYxTYKI =>ynؿ0}ayIjy_t(*lR5CAFE-mv) ޟډ?B:$nɇ ܛt (xE/pO?ugE%2FվKEhaeT(,gysWƶV[cM3|v9QrE&h'-=. B&_ Z&14|5ҷ줭}-tL~b6N0g^o)c(t3v*&C>(DBT7N`WT૩(HRWL$U%JcWWLԫWԕJ9uUɵTS+;vuUTz Q .SoO?nΜ/fekh-u$JA6 ؚ?JF?xjΜ觾ctε,%՞%@T cȻhC-^%JdiKh>ug k o[/ f2y3wǂ>pDu$ gn]mvelt~a)kjqRҍIB6 44ש1!([ I䛳7D^\Z1XTQT*7Q+$pP1 8xH!;T8Wξ5>Ͷ!?hX63M'2+^cKw&`-Ang9GtN7mcЏoՋ6ڶfhxjz.5U3o&|⍭a`wnz >zڎ _\TٕȍMj6la3mi%Rg'/t!$i l1 G)B셷#Av*Ϧ5btWYe+Jڸb[crm-Mmz0%b2e֫i~XJ㚋 /:0tL`MP5! 
mo<G=]V2>;VerMNZcf2MLtNf\YKaLTtd%]ZB{wcHMUXW"x[Vˇ|V{V}gSn_yOWtxݣ=_58*Zq+w/_N`u _XܹW$|Ĭ;9Yΐ߳:!^o\b+ 3Vق˶v{BPz_ y/dȞUכW39DhAld ]V hZ*oSMAw/ބyP]{mQ@H5X= e!() Z_ RV>У''6=J,׵;,}ZF6n=z!He3sEqB*[GTJ?yi)%i&.v~m3.q^2c$ϲYYf#f駶('N%헮fgl\Vr?L9 w eL'6SN(Q{) Jk95f"3WbǺN z& {vqǴ$N?r25J%c`ء-*+ga$NX-#KC16ٻ8Wz))Ȉ X;= <%BMjVkodIV3IԨ:+*/"Hf !6-XԦEJ!LU=5({E;O%r4zqǡZ:km`ox䘃8X%~-PtƘ{%r1VTBToBjCdAd E%4K{B̵էS](9wÕS-jq,}g'8i8^9Igbs "Q۔J6P egm"R!h vՈRH qq`5W$mPWLY->%[-:knQg{u טbݸ@^I/Nzq*p.@(:|Ϭ!mY^S6rldgzy)*M_m-/w06->^ "4.۹8|&AֶRBCd-Uu:pܝp)pG'ڟ En,ew>f/a<#lqѺRnQTZtY@5HP4V#;G]=J&ڌ'0BVK؊Sa4L 4ʇPw(g5:6N6N6Д~cO`˨C7Ph cf"6uA.lJ\vK,.X8xUszd~n݌W.=9]9].j-g_3.U VqкT؝iv.;uhci;nq}oNjgw㲱!-kR^H+j/Ddq~,iHC.gS+! ( L^Ԓr-iTUn1@#i?RћUAM..Q֖Mlud|d)K[TjZPe \3{8n =\cVj -Ckة [HӔGH^#/1b:uwϧ35RulM+ l~}``|1ٲJRb~|A1$zpAK(`Chm;Lu&fAFMr sX翕xۇ8?z%(XQZM427l2?,q`r;k#ZARUN:8'HVȢ(ڸCդD#4^ް&8^ c PT]k%T1ȨA D򕨨 ц>[UT|dN?.ԧw{߶vjCθ}asǙl5OLZsI!)$FDbbdSM/ImPKUI<ٻg |)% r- .'5K_$Sj2k$:떲5\?楯{߷ѵf|$l [(j)i';ּzNG7v3*؜$mmPx@f;:^],&wbK\į6p>r-\pWN_0x 4"z _/7OnkJѮIWO|w7}r\Q6SBt\8El1&N 'v˱mloocStwy?(7xWj3}ݧmӥo{!]&K-L +zVJ@ dC0A+nM:Wr}?;Ǟ|Mp_1yOYzgr8k{!fs/ɀ*i M[/Xz\;?' 9(wt?"<_{~Oz?jXjͭGt\(CevY,I JV 9.r^^l/~Uɖ|XJ7ʱ$tsU_nMk9|X~ضc9\醍|_g??Z}l͑_V@ؾzN||q\+r~iVfLν&Lqi~ UpGˋqӋ?3yar;S4+SWVsY7҇ +O:bVf#Q.2:E1r@9@6V׌PB\r]tICwX[8Y)6Lq1):I=n6:٘]v0_}9egcFQĦ=ʗںM/Ү \$;ojV慚QXh=&)EKUdҎb Qah]!e5IrIvI^eg8w1AJ  k-eƒeA3c U%c E{h`x1ˬ,d|GEL7r֍=l%WS`W0jk&hrVWHٴ/YQrkX̀ɇD :V$=EE~bGnY9GW!C4ZeqZk"h`d'M~҅)m) _E?ŽF"$Q%v!>*QCJEp8FU؍'ɾzw#}ti"GrkjSsITTJ:LQl1-u AeR:N/""/JE/GS~ۋE>,|L}{K*iq?,5lYU0M Sކtt: { '~qƆ4?4!J,{ /aMզUח )ꔋn^S.ݙA-]n0=MnVAmnjc-WW)\V5 QZGL!yh`J@ ٌ!jzWO]Jz/h6Ώ|<ɜy|8B C{AhC s)YH}9d7HR1*[ yp`1.W0ĠLkf7:EΞa|>P3|v++s1_0r^\Ve+% }ժ*SCT}E*!sIx]>R 54sQeys2$ZHK :VM3Ğ QDm kqP¸oIYVqϽ2[bPP):UyIU%.3*B,OO'i2Me$+)c,T"ݶrrJXpb^RYP$H}ا]hѤB5aN@N> r,.8]}~y~]}(,}-"::YlYyl#xߺ#O3`0kKsmKb*~.e> $]bj,Α88Sx'v{H[?Bԇ2j@1y7T7_p Ehi qT;IJ nRz}hI=%ߪ-HIq&$Vz-wFz9 LY7Eq^J R(sVA-m=4'= % KI ^И fkrF-.Hͅy)8wy.ӐUj͙KbE-͓W_UWV]٪+[uelѲUWV]٪+[uelՕUWJ&(OE?TԟJVTfnQ*?TԟSQ*OEIE?Tԟl+OE?TԟSq6D ^ԟSQ*OE?"QԟJVTԟ/OE?TԟSQ*OE?_ԟSQ*OE?0\W`V|I=K[0sh7ܼۣ3 y{>Կ^GD.Gq__|Ln5W?g~~Rn$sanM(,ݥgTmY.4g78zG8|_ AB .//.Gpm'Ϻt/ 0:oV~]4>7Us3|Ow?tu{Ѥ fOcr܃ػklԫpy:kW/8_l7gCX>~ٝ}ɓ*c@[j3x@i$Q䫴]9AfxKNWlKOvœ-|ă掴'5*CzM /g)Oc.*5XgKȧ,?`P=f<C~%qoz'nԻ=%~ Nq9C#d٫opt;\bm?&'ܰ5̲y%2&ZMxoKԜؾ5y6#&Ki_$iAS\>iop3+V-嬫qrpЊ{+(wĈ¡gγ٦tWʿ>y9*C0!B*Ca g՘IDk1hHHIsLP6|s86ї.ػ J\Î8eYmezG!\ YZ[Cs S@&y Q7/ґ ¼6cXXḢٚAٸ)Ռ*:8p~P80ux>x}%`LN٥pq;CZz!'Q'i4")MaiJx+XqDL CVr0Hqb\R>jK )fd j0DȘ9q5Ky,62B y½bE&iSvI.b ]%Mo Tf0ϯ2̈M| J(vsC@dEw8upԘ!Ɠ C(IItY$R:""UnY\Yv4sۢ̌ڲva+&ziRL(cEZL&TW:(@B(&P\H)b`!3 hG9AQNq )#(No91G7a<'ݯa`n"ʌ bAĕa\aB<3B`BII\ `0s* l: ń3>XXiHL!ٚoj XW:ldK\͂bŕҠq-cgh LL"؊qmQJ2+D@肋Ń95:Ca3%:w$\6kFe8Eq+C;$a6ߣl5V,_YHeĮ=~#;-۲SGCDx#'rI*kʣ~D5 ͙$ƃqHNDO76u&v-kVͫi]& DueZ]uB q1qGOmtO;Jge έͦB(YAծoj/.~JhE}ӫY?&c~#׏Uboe8!Wd On!jLX9 g'`Slc7[_6Zp /O {рmP`ZYJL%0" banWw+|ɪ{6ODbE(xM0f A`03@=r>hb$y%T8 `aOL.Ν,s$#lYo&Gpk%jxSM6dTaCf|.|nV?2SI`F`'XdwGsI`M.2yLގ}s,iCjI|5VLFڔ, ,v!)#7/dp г7fD=eEJqXt_ (*`+搮,@-cʠ}[:^o2 ZGK!zƌƁZ0cNWL# Qk%i%ݎ) ݔ>ᩫNqI3j$mmBTOpNԌ]OVO_ݫ wf'Nhfw'4iwz._0zMnÞa`?jWePM_]uv4w7|i9á޿(N/YJQ^r<}˛X;edy R #Qic6M0&wY}':n%ΖD79δ$FQE@q1u=x -z/VQ nE>h⩦aIG֑:)Pʍ"pudI\3pO-J)~ј0[s֟BW }H>;H&~u#pw{#338S°n7%evޡLAqp~*BW 󰙟@[7/En\F(JЈ$t *xgPPJ`uSy~3oFzz^5>im~ʚP L[+44AR鄡iڇ-t:PN*{u5?~op:&ш5:q({~{WgN/s? 
C5 bw}]1͙R7و3'%Oǽ[rav8sva`svcv18wCsS=>0{~|4'O{~ #IrqC7l&S>䡽QQqO2pTy[i=iE~wp.cxԬ@>`J'G Qűb«LV )t cVL4}W6 G ?sARP)ƀKVDiLNǚ+C"'.h v4AaCшnF,B0#BXYL^jʈhA #(H8Hg:֜ ָ~kC=x:?}ڡx5[qu?{2qQWW˝SҊ,}5I.k1輑DyL{\G300Da"aQA B0.PpA/"HF$H-c{f@!;VW,i^⒲% s 'uCXK9(' U(0O"X*P`8.A^#vxh<9|,<P5C7[ _Ƞ+ ?J{'ai0k% \kvh.KnA]'uiܟ=) p0rYzК@Vy#0`IN=̧<$?B)ƨ)>۬X/~d]K#qHdh]Dr[v}ӌ72_iw.tiJwdE,2+'A Ĵ^9a 88]`<?8#}Z@~U:Qڇ%[ݦrݿ=z؟ cj{nB@;`X:s=?t\7T{tܤ.4(gAjLu;Lt݉ ݆ɭUL爙 ̤7ZH,^#P!0zA6*+Qc˙jK#ųC66gyc+Q}`(po{J EjFؽ4g%u6!uI;wpq셛.$y GﷀU-#|)r4ۭG0 />[sswkm4fVҹkX']j&hF·)prSi9P6AhB}D~ 6 c3+۴lx<7P'kWOW6ۗڸ䡣?Q]Q4k* 2‘ [4mG/VSN{;XMh?G[eypVԒ8jCD_| 4"aH:!`V<੄8C M%.36?zZ^ k"a!A aH0PjU9uQkDF$9;`GG `r rp8cObԒT"eR-i$aKf8S] XE{ϪGų66(2nGmthbD#\ZR PZx(&m2S/wRqJ{H qhab,}2  K̖PܐWC: v+Tw02M{hntH뙐ІGCZ_4zvh5Жq*dX6L5>YS`U2m?Nw}ЕǼk9FQ Z 1`%jVhρlz4t_qO!~p{`=*5Z\Sz.ĵ,EF3˧z "^8gBGR)ƟlK&fd'jl$P^2J 2A u.PV_c'Qc'Ec'a],BEL^䝐\z$]b}HMx R  %ON$(R<;ۋ̖ t mbw;o*Q:P,t 5A`:HdɑJjed<̕T(LDU HZ&G^&rwI UpaeqcPAKGBs%9%x+zrWKp;BJ9DE$:ͽNiY>dᔹ4LV/C JpxFyIdg 6IpyZlOP(JbM>ko9LLԊI*( s9jHƸ Xԡ+qkL aDx6,vj|QthZTtxA*;J_BBC6WnkO^(MMK)( O;_.Ok/?ՃԬ"0fdInNEn3\Ni$ "*)ރd2;b''B&}ҰhU$$ xU9s,;I81r' %LDV6%cL7rVro}3st!FO!K]D3g<KknS$AКV:dγ^ Z"PS>_(z_"Gѫ !D@rŶ RxH^:\A:#,P 7Oϩb\S&sV?~ F@dP 4dB 7̓Gѫ>x9]AN̲,7:ːZY3F[R:+$lW]>wE{8^O|He2,J04q Yg.tL%I y"FDj} 5ÛП\^X'[2%qMa(mCDk%gdA#lTʛD^#HB/_uEFb=4 dyR icL|rp- h AAۤ2Z^w< E{Ƌg6t2KXYgtI327{RzGQ- dtR !d4d@YuT 11&$#|)VJ AgɽGZ2/!Ĩ$0k Hu'' hkQXGzPȨxEIFx 'r6bBOp? g-BY4/̇g%u5}黳Yfvu9.Qac;z}30qr{U ۾37gDͧ덦4(Q)ee;mC۬6szڲ ?q2"j-aK*Uj'_ڃ})i"};z>]~q:."ߎHf)Y9ܒoUb}2K&ίe9ce8wYG_g7dS>6kV5ƒwOL 5m'z~ZOE(x^ݗgyEY2[Y>׮auxAچZpSy~;XsyrK?7_-\m-uru/TDC b߯qˈ}׮cF0MܒMo~&[_`mk na]ᘶn|Ahu[67m|$u T1Bٓ51fNH (>yj=x_=O t$Ε-hKڇ3[D 7!Z%ګԻjW#U-7?8'h~p5ȕL?o5èTM0*`<K흅 ro>wV 6i@rRFs& P~{ 1hyGPpZbJqYGBrc IF/(Ԣc dC`RfG BPRhs㹂wC5r;Iko1۝ReqJv|.J9{7go?c0l$2YGCc*k%HVǶgrd # g5.hoSmnyf71PͿ!MFiٴ%ZNj92~ԗXx8+M&hoҰx{z]f#;" z!V.e+W4{` )e:'1BL$9&2Z@ 8"Z#s =蹑ٖͥڴ9݅,b}:+5Eg_~N/d.p~v\goZ88|/o;;%qW7rװ ;JX_BFl|qR ӻ~=/qzqʾ݊欲-mu{&xD嘦*VũFU@fz=yΊ %рd)`p*:^qw^iP]9i-灁&җ1)xĨm/[Qynb[?QIG{jQ0՚݈=hJHLPrT եM'_BS._(_"1UE|v,Lsڋ9!iutJJ"U7]MJ.%gl{p= :4ejDYe0 sTka{90GUj`Р%EҚNz"I,o4CcagNۚ>;.}"7JTϋRc͵k,RmAuí_[77wϧs<{ӻ6e|6rˆ[m/wLEZndCw7︿?mV!:N>_MOE.Pnht2kjc[Ց=.W.s|#Ns>^}d}aż KIs;&PY]Öi<!Ewa~8q]U[XR5tWYs*1(>MTKb E` %ʠ*QsQ`CʾdBI#QfoUEa/µi&gKO STO麭؂-?, *;JG1W*آ/*焊FP|ʡ&`+SE[/BDq5}oX݃u :q { ^қ[bhynM!nl8͚4䰳w\> pA3xɄ̶bK$"%e$jR^$ʹxd:H=yyҎ!M]OɭM阬֥hx9,x sٸ(dTsQO^=ّYϮzvNzٺ& [9:L53ET~ä_3YkGC5^QԶgw^ڂȖ6 ^'.enifsͰm&PJIfX'Φd6#yRɑ$'km:ph%ntsO[.m2ƴ4ޠ 3xq` @r=nrj\O/oSV%-B29A["]g&m7y=ؿ0Y&ܬ2=媋Nsh}[JZ ^NGSZ)L1^ -%5 A{ǶH)oj!k i[g[״o]{-,Px-t.ISd:lKE<=sF)B`ݰT~ƾ^BHآ]yVxGnzgu\ %w c祍 ^XmnB F6x&܃{$~/</edMN=YeͷlK"VKGY-R1݂Kvd9[D=R[IKyx0F/LU?dfWpy1ۣ/H֑!g囕7y&`PV5AQ2 Ciҡ4igKmw>K]$hAK*Ȩ-\Į"0G666C|Ip37nu}2[:Cmi~**ŒhVu}~LZFJ0RUuQiExfBxҵ8,[1Eie:r΃ NGo+b%{0FJIyT% nM3v;ǵ:iYݿ5J۳b,YJ)eMfq%xU+ūJ'Y\IWU\k+VDqUU :A\4F WU˵Z%-+3}*3ն/jmq*A ):I=U#는Z]UU\"TzvUk \પZtNW[,qSsսUNW>=bJʕYju쀫w+I]$̱q\Nj qpJ+p=U.7rWU-uWU5NWRK]pU;\\%l_pU+}7:E\ޯ}_uWbq?8ɗVяy {>Q;:>xdצJ|&%Eh^9bX>ϳ({4VQ `Q =x$S{ȮUSĕAO 6ԟ@*feҞJNWJ<=9.5__.rp:,ͅOn_<}͸?_WSKnnf(Y|644nAXaEt% x\fhg{Wsc2.r:r‡?o??fe+sHCȔ|`rΔɠLT ̦dMMxmH1F2Tr$ 9:_.^s Sk&뻓p eRg_zdq2ϫ%>v3B`JF>`;mU|o˕3U5yv/l~N~ ^U:f"1Q_Ͼjs3́XA/V_Lo?.^thT fo4[vz?o{?(_2Bn\wѪ?&?U]<_ܡ]Ӻ&׽y "#h=Z7}gwS?{۸0O`7Hr<=$g3j+[HٞكSMQ2%,l$qV]_zO7jSSy+H'z+? CmJ+RN$V[3!LSaf`C y?4}p .ۭ|1޵%k͊;v ߫C’E*Lq nL!l@Ϛ |ptzXzbe4M|U^ UW_1ψo4n܎jwE% 똦Cf$y)cu\V݇g(OP1֟RҞਫ਼g=>kf3XѪ~8NqcWwև6JWɖgCv9.2MLryzƌƁZ1+֫4N꾇r<%S?[Nq֟`*.g_n6O75g bW 4&LshBM`mx5wx;~16|L'oʤ[nmvK߇ހ pW ˖O-z]}Y$ﯻXi"5bsyg<%_7e*{i7|S_vMӜuxB4VjeӚR> 1@*rv;18u/w:&r:L𭢳.߭?uCuq]Ivv(u9tU}v@\ )g$z'Ekw&tπe^VаkrD\`բZ3F6 otTB@DPCQJb#XkkHa*0ꨁ! 
b ??;mAZ䋶Ž{YɣXn<ێXt#:x% Z7{ :;伞oM͵=$ZvG Y_?sTޟ2jG䐶L#" S[aNJ#b ( (_֢W'brK%32tr3r#c6u#c> YƱX(2c!X(m~q&:٨rs棪/f4ޏ#61 *AQ2?1CtA H"6U42x )3 j-$& fh/Iu$LEtËMm 1YDZ-3Qg,;MSPƊ@ĵL"XѫJDc R!d2+F.q1bQA ('8FDCP10:0 "fcQeFD#b,q%==X~YZLܵyݼ?(S*#'튙IDh$.@ bx[' h]@uh[<0cFCD1Mh$Vs.Y K*Ljd<)V2""&ZH0<)c"ҙ#lio`ل+t9D1C-8^ k"a!A zDcb(*Rn机5NRiqjF{Uea8M0Nv0w;U~w\qׁVW 4;E9̮ü5rQ+P`FBtXNJ̙놴@~E{.av} F ~LuFi 04 4:U=3ƨXd qȧS4TS$jKϣ VFH]sL(QmGs:2,"1tbrX+B"sUMsrBS=~Fv?FC۱iP=vgx.G vJ_޷6b$+^J2:o Z+( a93 ys0@NNpDHܘq =00.`"~`D"1J2fZ`(X$#yp3"ا KIAW);ŜR|ưqᲩ&sBqF߹kMCzWw7Nv@;5J䮣?bYmzc:8<'儴 &I$KE 5ȋbĮ<3-7'#[CJ`&j2Jyx6ZZj9ȓqdid==WF/!TZ1p`*o|,q;)T0B(5g;uZt#qHdhRO"E#ݯrOJfK9@<9pTy6itFˬ .{iS;cpt`|ci'ٙB\^w=ߘ-]R?1_c^~:σpt`?O2jJ.tKw5,}JJZ+|S!n()+eZ&t6KiWK-ݿ?V[!hY}[\D೏-K,}ЯA$L_\|%&6%#ƽSz|)+绔 *t5} jTnNBX{\6=]5n{ 4Rēm'C+NJ)c4'Jq?ZmږVJ )±eժ0:8;T@iW3gEpqUJ\iHBE'cYE\mM/[{xOK7 QEύ;U>}]yE]U?J>nQgrLgN~*j7'"S`XI kJc8ME!븝p^0Jٔ^(qgF5_Jy3M]piQ4 `L)Jp02jNYtR&~v}S5#X5I.JN),MR݁ j< 'Ucx8""E&o)W g-΂iXĨc;9"κ Obystxڸ5fKۊZZz>0~jű{g۞(JM^y.MhJ٨^: Qɥe: Y-"4zRe-P\Vz0`C,L@fLz*ykpgٮo@ǣ4q~Ts_{YjiIٞlCY3d!: U M F22&@c>WcZ A *SAF#:YB,cEM;)95g;Ds lr[/)M˔zN(2yMK͹~fcn}#_S}l_e/(yE;2O"I/5z*Lm=h*ѯatZPz@Ӕ&jkng{vipgo2"(UI < q>2I02 J`*Tπ$q,(ȑS$eͲ%\yϒ4dbF)t[ޱu֜ҫI~j֞  PHN0Jۼ?PQ)PhdpF%Ơ{MT]oEzy_ٜ*'>Pp^ qΙq@9OCΠz?{G% 2CVڀ?Ti[ !g!)>xn8:8|g2HG#]tyuZzCJ'j)"(U;f M뵭m6ՙgSt%"*Xڥ]{St?@",$$1P'(.֠THݩ~tx.љcct6ɆmCeuEX.Ϟe\vKJ$vCB6XjN7ư8~gGP2%:9k@i%2V7+& (ې9l@Y^4+ Nb6Lo#+WߞJק͆-T[o{\{Kp}|qL@ ]jZ8gU U!PEb+4LjM!F \h|q;cp``dT!2i5t)XD輫xb.jaYfh|_0]|'QVY)IE.e=B%NGB|f\O|u+6q<8+h]5TH  [kz{>V{ O9f<s-"d!*&R,Ap2$%Ӡ4Z%D O7M2M(&Ms.T@xˆZ r,ޫĩӎp$h+RO"4:EQ 6NHxA%TAE|pQ=N+O&Os%z,g.0-PQ8JeYI>8Hf]IZIjǏQF?]~<Gj-H2S0 ]|ǦWYJFYz.}K>xk\-GUJ'i8.VTim'c`,rQBLA糄9 u")#ſ&B6mٿ_ AѸ |K=MvB\RU '7Zqm I)ڔϗq`^(A&oOHttR{ZͥV-*fi5T}(^],T–m{DҮۿjL,X;j)_uf/QF|6:{S+PUZvڢ|L%?fhOh3 dC(]eӾJrÊo9>yϺ@I@SEג'Ԅ,iDC3; /6ɩ~kp{ڞ;& h,@^~&o` D߉`\DLi-:}#|Iǥ< .޼8F.ۅrÿS6FOz.fx*,^%(5&>iR@QΎ4P}TG>rCeO|q/0؜8U5Ҹ]NPyMjG"FUCN2}o7o:_V9 EQǵ>K]\y((TP6Q'.3SeD+FQ *=7cέdbgŦ9A THY"!Xrx J2ilΚzn2ԡe|6ف=90~7m^bI;j׺z 9q2حѫ:s1[nUUk)fEk#76mrlsչ;3fO9~.t\Ko_pyfU!,dHnz_ռ("w;1ס-7y)I76f讇Z (sƟRzsXɏY>NX?_C֠% [sE͜\_+?YBQ·L6ixP$GoNwѽȴÇ5W? rJF8ymR plY~J١'+q9 ?pUԸxDBcD"T =76EWǵ<-oZ7SWV~]KէmE]Us`vs;lT{ngpR+|@ûzR9 2“c `zY4x q?8 x.6&)x4DRD JE heſ={(&'I {B)nc<*S>@|2'bFC+1@# 0-LI`Q)b%|{:kh(Ήї:г/%~eYs'EIc5UϧLo0vaS>b/j$CPwJ&e]ނ,5F*^%xۍ6oAw55)lNꭠzz|ۇ^)sN~Kg!fOFغ\}H'DB&ܩr ڣvya}0u pV;mpE&BfNXp8^q(xxxX?$:ۂ~ =qÊtH&٧yXy&"!+rPuCnngz`dW-*feM9VX?K1`͂'0sj*8[!UFm> SW>I;)孨4sYoQEz>_ĵqe`MΑtyOn;[woߢ{꧉ކQ0h òK0ٰZYbbH% tL:8nf}D1'|0.J$$(fK*}Nm8( [\[0D*-HL2"bl #+p.%NߌܱNtaC\_n=i{X7=q/(&{"]DwߡzWS}*VNm?T uEzsM5ݦnߠvS'{WW'Vb;yZirΟPq4oj8MZkxI 05 ˫i\"ٕ`ӧ]ΦVCQJ%ZUsҎA5 x,kNXD sA`]ʭlF9Sv5H5&ӷuNk]HCɪ:'s,NL:jާjb$cIڻys7qv!Bk=~6-. m}/{B'u?Kb11bVf;rcGۓ!=|Gȿ}Yop]2sdm#iJCLkֿ ?~kG=?Z 8ST"Gg)ɪv vE[QoG:ygk毋}8[6x evgK <֥bvIՓVm T_?8`35Rul++"ndIK[3z9ܓ5DsNڣB0!CoP۪fv>L*­.^+"Ҏz:a }#dvц{~vۇYdoH N.m'&f˩'XWdSM腖$6Ԫ<}gv{1OF虐GCk i'ȭZ8_٥YxΉdH8f.I=*+8!k!]5x=AwqqV/T0@|-N }QI%r@dȏ`z`dݬduݵ`= ENQofww xt xuCȅa>Kysͭd9)X#DXGsǜR ~\%%`*B7 YOܿzuZ=IP$}2}]E?k?k5cOhٶ>8o,?>?"YoK*nj_f77-T#/g:Ogw?_7 9 t1{7_|lV}ysE /S, -弆&E?{ЬeڧXR~t۞f4^SMHn9C1;-j}xdv zfG h\]8}~1[sVJ~`Q6Ñ!Dun Yn1Ӟ2[-~xzqNHWnחw7hjxdVEG+m]ns}{r',Ҫ}c hun a0'g?8__k*|2=]MźwnBӖơ[?SIݚ5sBN 6*\cy*^Y.m+mN.[/yR3x]9W F UdcX}ɨX}DSɕ*Zˆ*Z(*ZrTbvՙr)1m4%9cQ)(IhJwc:yfZ5`Y]agv>}}=D{}xLP; ׄBZ*Aa,b-*%Ulul+'er+'Ab!.F!d'k 1TlmY?fwuvdk*H{`-bb,'ԾEN}KlN}H+۷xԾ'lF|>S-.]Yy8?O<c P&[ܼu p\@~}7endwө IX1RpU࢈(S#+, 8kXBB_3B! 
˼$2Up,>Rl B1):Fތ8m4y 湬/lL/}mȏ~5h6L(ɆۥCߧ.,j@{3I)rP]rTt$M-;dM.PkbPL@v1j([(Րd,wtjYdgYr,P5VʶQ`XV/RXRٻ,^*XjgA`X(r(GaeYfe1%S/*x2]Zȑ\T\{VV)ZnL̺2)'k#"}-CK9=Mm/9G[RI Ue۞EĨi(NyrĢ.E+vnǃ36ױXG7 9:\gp?&y[XS:_U~ eXPkyǥ#D Hb)6&-ܞS j2ȃZ B`˳Czxŋ?ծ%.l]q Oy8BG%C{B9EkM ц 0S,nh&#1&qD:QɃ='/r$ zĺfVz{39AǴ ?|kY1|EF iYR]HMЧ[2 a jT9qIdG.繸\O|{j1'C D"_ZT{1"/(mP(Q܋r]cl4Wd T *YnJH}\UEuJP)˂iRwQ4-Mo4cHJ#WR6X fqOLt;a)a:RԢ=޵#E/7>/n.-n&{'Hóݒ,ɖ,-0HGSl>ů:+ ݅"4:I%QExkoLLȃ4K1N+O&ĎO}f}  ӧhhQ3󗡦]$.|+7>Mb?UAvv]&k,"M0ՎFas?ubSf 6+U {vzU} jz9ZI&Uqŕ}SϩϟuMkn5"~45+iaP.W@uE?(W{tPq~/w4;:?B9+˳AΫ0"7b7sg7#o)h[e_ 9=WlS;K׿:LqO<}S͞ͿjI\G'$:tByZͥF^KU¾^y{eL]?'m@Igdk\e8;*2$ fҔ믻]3H 7!8bs=oPU5=[>v{v{ҎP>&833 U[% <}[Vh*uæc?8>9fp$p"+AT ӈ+A |1!p\' w#mC0(WQkrN%sU9상D߉`\D'u4) şڊ?SwwzW:n%Ё' ȶ9|:4Ie8.8m$iU‚7b9S+X*gP8'3_v>~rs/ l41` Ѿ Z$*ldNROM#˔!ְq[kI -IlFa?RrSCr.Fq_QׯOzMPj.s|T/8,s/FN=J^Su|^XZB{;nm⏟A͂=xrs.ރ77z!}~lQ^|Uyޛ|u.3>T~y"H [)-ResRCĭ^*LTy 7O) uН V_~rmj%DJ+W3[IJx"G>v| STz) 51ZKy4XBS>@|c%k*H\@D  hf Jq&L xtT9* Y@o; Q'_ۑ|N8FU;~ԻB97wnf6o'㟢]vr8\pqRkR$i\>nE-CQ?Z :l=ͲLxO- =9vUIr6kͬ}H\wOqP]Z5  4q/w YE2 Y]AuDuJǿ8 _TK芋VZ @*4CaɽKeC~3T=Ɠ4jC%ˆRW"ZPC%chq+J-0w8tj>]qb:q5U'\$ H\J>Hr7VR'H3h5FDhc"1MP*:gP]F Dy &g L#ZsvUew>xn{<$׬2gjL . 1T3qPIqQnY7QT HG:4b#8\$r@)Eoy֜uuJ-qMS^gWQ.#t: QMp(h}#->Sn)hoPXnqsZ Ɓ;ˌR80)lZ'T l&¿"N ۪fZZJΑD S'Gd 6$R*&TRmk֚sfl [}ulY  -}~xCs>Ycr5d7*/?#klRaגi*,.Fh~FTKyN=A CFQm5n톃dbvA-Wk*Z حް"@ != uMKcTaĺO4e}ZsևQ?ucW4bkF-kD]4bш[nd탳 @ffx`@*νgN9 ^@3݌"mpjTl]z5D%UuX’sn5_Yj^*]O߸.2S`U~GųDkg`׫+6}7jc^?Qtweז[͟6g`h6ZDX&QC wGl7"YUY;u IZ63kՙ]6k}-[$bٖ>ZE .,hy)*>(>u"2! T2C!{`dOܜ{Ӟ:3&B앁zW؂qAgHhd0(=JJ);r}2prz [Y8jqF^kqE7z7V; ]ލ8'1ZFER%M.*oUi7):Aa/)J3X3yoWp8볳&<7;:>/ONvRe5>yHQ8*AX.=P%cV aWA\@a\P} `+C8'NzחNLj˓= ȃLXQb%%Oؚ</WUIRh2YcX#dN $'N ѱNѳ8 XdJph,-PO5K&(=kтi֑n(ÄRn&x9n֑aYϡO-J))$t]{+q#BKI #?)Svj ; d?uWy|+%T ҏ 귫00W9g>>R-Nw;X*x\jYه Rh~t?~8fpHav\iMTT, O`*`AKaoSw̃#uT^j >H.$L(fsz|l8a6V.Gշ?U7EesKUf}Qբ’`"pRA_8kq.p {ExsQb9naX:$M!4gvIWx!ŝ?hv #O qDُA `UJaXK֊("0`2$r6D<_9ay9͞T(~ vMy i{2={е7NWy7(bϐ Q@d3 ys0"DD""Llax@ äHH1F)6[ƬV 2 6-n]#Rg2DOrKIAV);ŜR|ưq]t&v0ޞgd;+'w8>J=Ze;K75lxݚޕ-y0`h@sQNHWx RhqA "ˆ3O}v>֚lyޛ塔k$LF"AE͝W*+&aJ:HBly<]7nl+[7 8x*o|#X0wSa5IcdLY(kFgw?p3M9E]Gv}]DBv/uaē77=m.4߻6'*ɴX:jeVxO.{iS;?lƻ2+{/;|Bm͐S3x{f5܅ [+-g4軇_$p Q.VRT穄]%8ܟpˍu+S_F~E|21oq :J1Z:&T҂|)&,xtS\3{F7[A]~깱mMzaox SOM*z `CO;C{n۴I=MsÎAm/;"$D!NihFbyyIJ+G9ygZ(J(hp$552Z$5193(zP: ySeP爙 ̤7ZH,^#P!0hA6*+Qc3fWYQft67X>u8e$gop@)աxz4x2 INuxkCR{.+>αbƏUcҹM[Ԯ|*J6%ZQ7y땤?Q p2/?3нsیYǥ v ?R6˹} 1wϡ+~cFղzV?(̥j('% Ag*CA ƙARLrw^ۮQm}:J:M|A:M"G@_AYYOTjCHȞ ؙjz☽.#w¤sh yo󶜱bځ{-Uhi)8E B`%,A9e9-VT?łgqrcԚ;'H8ވ" mZ'Z/lzT%ؽקC L82c(X)BxA0Tt<9x j`E h2ej@QAsgB@#*$;A (Utt]ށGi\=^{}6dU|!ȑR/Ě!~vo=v9w+R[20ޥ6>i#Dcs;:Őg'[ i:/<[@-[t_.Bs0yؾr:@ |em(: )ܗ{X|Em wWu~7 G5$?6@ܷm/,Km^"ZSis>xgLi3w4Lԥ2Sfg3Sf L)X3k&L5Sf l2k`5Sf L)X5Sf L)X3k`ZJ4Hb܀B1CX4b]6 Ox&BD5f"LP;% G> 53f\3kfp 53f\3kfp 5@>22k>|88SgH*Χ^tJWsw) yM +ĩ;!AEΈHrrH>Kճ82 Ne!2aL"S`O4eTR+q@^+XoM3l)hs*x$Yk`~]J [pq6  ?ۧ[>}HYG 7A3./ 5=LZ"+m[0)B%LA/* Y4PYF/3?_}C > +G9eT=ͭ˱{/\K=aŠcytX+X,dqOUl1ǃUvоjq[]Lg6A_zџMz",GIXJm1q8;԰[1HgG”br}j{oxVЂ`=>.9E[w,%N[0JRpJXsjz?*#3?dg$h c c%$XC(8 6}Φ2';B TBꙵNs#9_M@SZ?&L`N G1R ÔS!,Uo Ǐ0F"!a%R QF& gZxr%7H ٩u+¥?5`BL1jv̂Ϫ@.k ˾HGgϝc<K!v~6vvSmUa.ti{mY5eE3w.!+W,(*dPB9EDt11X[0ަs@ s>[1ONyH : aQiƝV`!@$O.p/jBĽ PCϑ;cf~Eeݩ"j&` a $.$ޔPNV%)X?y4jiE:JDA!<'Y-99vNDfU8= )I"uHz18!9V`A\eVqpc"!"` !>yS(^w?p73IJgy IrҿdchR¾n#Bc>,0?KYd`LT }={4`FnGUd'Ցŕw4E;|S_AnB]i1CwqMÖoôw47iϢ=Sľa)V[\2t\ܢ+)(mwa&pc))7aZS L85O˥5UZ"RW=|)M55)˜a駦5EKQJ#[?b/%r/ *#-"-UL- !{ZMMقY-^'OMwӚ]tm;ǑB'c+[:{d2ǀľ3Sx2FmS-fV=/}Qɒ}V^;s61uTiStĢ2k 29*LZΫRI*{U%1˹ -=Q8-j'WaFY0KqU rI51K49̘3.k E<iYѠJEiV%^AّBC\3Z[r=> $J!Qc Kx";R'-B:ȅfH,:kUAU;s,;2 EN`P{4冖e˄ 
gilRF)h鶢UgK;^~\PE] IJSWh ,J''keeh<+ $%EƐ{To5MkPoϬ BA,g&Z'm^vpĹpGd%XV5{?Dv5]2䔉#τ1ْ&Ȃ.fieUSqt+k_O6tboV%I)X#1+{KReqDes,jzODHU:rӍtȮƇo^a " Qe :l]86՞*.%.O^jW?^WeS3)Қ Ia*,$튉l:gE4h()5ZҪ{+}v{_~K}8ӠxEppE`٢ǃ?LG녬{pqO, eGwvߴ3wzʺ5i3-qk,ǛU8nمJ/9SUy Gw6m3X{H9yC̆;Tw.cD^X [`JCT'(SUtRJQx^!NfR yo1,F3epTRM-bo.Lve/B'[] ֖Wc .B-IJuv0ܧ|`ZxPP7-!Ӗ Ifo12!'#!z ɕCP^F53Vgq[ϐhi 7T$Z5=)m<>mF)7&"\tZ_FAaG*Gj?=gUT~8:#@჉c֣ej֨7dܕNW94k"2l){J2&f'R^Ml=.E($L U02*հf슅2 ʵ]}(xM2`p\f''zBGϞ#69)OC HGƨD}e!0"2,Y$X%79gE^[r l B0jn1c3GЪ!6aa\r1;Ӿvں2j{15X}tR2 q^%ȯE89W Fe8 HB)Y*B!EH21C@H&&OE1>edfIҮv~Y5q֩]^u&~슈2"{Dj\Ь>zG?Ui뙵,K!u˵`Li$8I` "I ΐ|` Z$y剛Ӏ .^KD.NquYg5-{TEbk2?,Jn=@kdFŁg JYy3qTb!Cpȩ\Shfr.5rR]r' 4"~|GZ=G?^^~Q 9N/4)>9 ѵ0ǝflOa;R?̑R4ypJp@P 9h9{,NaPa;؝/wc:[fz o?t=~+F&^[tmLI z\8VtvCg_ngos8y`98tfCgΟm?lۓ|Vhܬhx3oy*0d_ITuǔZoޅr][ʲ;Ww>;]>]X)ڬ-:C)jGN+gcP2+})5OFŶKqrbꋽ1{lմ>,S-s`o{ u*s0H\dP:EiAwSTri)t5ВIv0pErW$Pu")=\@B֨ŭ'r=jk]\E 8RSD j4>nݣm=(Nݽ£7 ~ޒLPmZ;;c'|Swtwe:T(\US|v!G'G~hf gsIf> ɣO( Pg5cVEGLՖJRnh;6m=ϣ?uK4VY*J_|)πigzF_Q\|gpDN~fo?LcЎ~wKֶjlwl,hs`ζ23).i+-; Fk0. 1I+:W#)ʞ@& rH`!UIQpU5+YW/q!H`p0pUPHpU4W } \UTW/UU\UeH)mW/ŭ?;mfʬҍ'E-`+QI!yzV.etZ&K1R͌_m]&r<ը 6'/A垍Gi|uG_,{Ţ2i"? (po~Ryv7Rx荲Z3IG J'T`ఔNKΝұ $nV5/2+ѧB鮠IKswn[Txd^8/CB*_B9b2;ВM.qad:PV;. SδíYpb #0P1e-qs&Khs-Cɀ}vId/,2Ϥ58kɶy&9]; 6:=Qߧ!N1=ax0 bb4h#*IZRBe]Uw9SzsQvإAφb΁l g)u>e9i )e61x>%_$"HQƟ ly\.?c ??K/BiluOѤ؀8zƃ9]WW1.`mzȵ>MG8_ǟǓ=4Dc%FӖ5mns;ah0?^z'O˂jӊcʁ,NF/mEPe RCe*B.ե~jPtiSJ7›&'")J8 1`\H]֥Ȳִ Zr)=cB#dn5rHCW[Kd3OhV [S_CKw1ŴO(>N!W}TBma }swT>mSN+ s]0of9Ϳs5Zء^i-vp~d;t;7Eщ`f¯V70|[G-6xE]ikjDH)RpJm q(wRziʣ(v% ANݣ '>~Vs;{|{:Ջ kR%8Oi:N[Tn jh/vߒ5l pӺbIhZ5mpe\l}ּj~V(xˋcGyxhy`68k]q!y̾䘦e}Kǯ,KfI_)#^iq2i䁦uqzd׹7.i^E^T^SAtM'~7/ҖOsf[_:jyeGV}:~8PRDbY}U[Xk,H#aR|1ڌ)K SkE1Xק20-+Rh׮.Wٟ+.H+)yYM k^K.PA.ybӭy]=Zn>p1'&_Vw^bG#~>OVNb7ug |=fק@2z>0RWmT r ~yB}'>q wsӞ\]{//Ud߹ě DSקc]]d6⚘#y㸄̵Ԟ.)ٷ ^\5x8]r{̌b +n{{(Zmleف5ܝa:)u/ /LR+?Wq8WfuЁx.W,k`3(yӑgUvaɼn}#[h#mݧM/*gqQ_q8ͮ *=M &äY8fhCHOtֱrPTB]x gH3WK\at1 /tD!M I=x`Ŷ/EmG ukqva띅Ԣ_y]zJ%,Bڳ/oND9CVoLV#7N*J/әhʒΫ\Yy%P$a)=4ƅ16_uCAc.Qq-CEXMq{!iL*Z)C0-`9dA,uPo Dcxbs_ ޻m>vweGNKUJvBN^*d*`̚qi}P#O,B۰F3WpoSthLYώS-}N]l|zXTx `w60ΘJ)+<(N(׌{VzF /9gQ(3 2ɁQL%1Jwog(} \h)PY$sLʶҾH#*S4((de ") 1k\vD9#ڢC;+`^}8: ٔbr卯N7@vˏ_E I/^zv_ ^a"0Ze4&-؞.rVy숡y΂k$sZ^ή B}RC(#vrrYr%FB"FGRZȎ*ge'QHqbčB/JXa,[&!Qdt[ٲFΑrV߻$fx` 0jY:F8sgem)g&E:2*P{h1$^/ /zlhY(:^3iG%/\A:%3H{W5W+G}Ik6f\PP&s֎?~jO@ `ep+ 'i^^q8:^O^~=zzW=ctbo=`8B!scl h1ޒ!xBf9qK[{q6J=" jYVF$2JX~H,*%g?(ZHif{̓%vBezhORY/' Dz{${RH qJW$`a(0!Zc$gd( (,( < sN?X=S~,!H'` *`r*t R$ HPaĶNw< E{iQZ]ƫ?:. 2-^,w,y~"%8+MhJNqEfޏ]dSVV`.0K~zִ\nቫ7amynLy}nzchYW3[9>CmǧO{Yf>:%7'ykJ00rC7W r֑Bۅ.YP;cGt{@1Ke_ɦWwd0*UpU r|gN. 
p$uEfYEA@wmH~mR,2< v'0j{-9-[l],ٖ#QrbbWw?~E2B4N0(+D1~w0}vW@=R&3ǭc@^(-*%J;o;go_Cߥh~7mS^z08,] 7o 2eP"igjfE<@lsu@GF9$l㕷M&jS?[PyRw]Xl֯/>^h'IA҈aYb>3q,8Gk9n$j_sj-KRr{MvȾ>7X[pí޿Wg$OC/3Q09kNQn'_BSw._t(_t"1^X [d}!%KZct eC$jNɥ* Ì'tL)z yo9@"\]JG*sDVRgGƩ|2іі"ȃC)B|M#J#V*2ԗ<11K.zR(;t֋RN9Jǹ j#c5s^Gz\V}+cX}argD7''az7g'4n;KO"愞h4( W: ?R!W)Ȱ de&b ߪC+gnh!=獿pt~4|*nw-lp"oiZ70}7CV9 qß[G|Wt&nyWSSq'Y08~VދL{kx Eػd!n97io;L(xs4ӴDQL87}n*=5RJ߀XsS,'pFdY]/%s-s/0ź_JVr~7_ t3+2{6pUv.pUUpU\!O=\}CpI cAbwi[SHR@pCN@a3KN,U% 4d_.5̠A)&˨h@jq_["ǽNռuV֝u1[p'e+G/IV-32:viTYGp &_TC.qY3O:!F'"v9 Jm#gO-pp!<@ZA긑e8;\IXP:Y#/EqDS ԣ,S %92R[F:3:04FL%IaۀA4D}2EU2RL!(ӝѥ{Fnσj>ׅ,^E1?[6|9Wcl'_^Lx4rQf1z}tyZ=5,GPHiOO|^^ltik6_s{_nZ~pkaA\>WgQ[WuB3OpoIقꅪU?Yf}ewuP@?zi=;-"ދF>Y |xG;q^,V;#@ y~#/(x|&[)WyOkDȌZ 9gjO/JU2ܷ?*S颺g/?}b]ӭύr?h:}N+a1jIM˖ Omܹ7|o\9^'6u/ T7zcb] 2a0}!XwXW0I2]]'f;X_wuVuUNhIu/=rR?9>LAV!*a"ic,M82Bfc5(UKI #>ym]> Ѵ}#*=;4I([}=ҡKĈ!R ??uo?|'nRߌ&60o}sPru3;e_^}&LZ{LV }L2 2ݘ+Z~ ˛w~bUNC33Ecld0Qtc̛H+Vl_-XWDHZ3"O <1GZNȶ/dW/Mm96Air>Q 9WzYAQ 62%ɐPE6-y[RZ=zK;o3evUǎ<9?o,U‰2Vv"K A޽P fbx'"*a7|@QaGs)O)WhGE oj=U))EFD䳲*M+^6@@Gfg t.$^% RdTi -g 3 < #g:~zޞq]|Ulv/lE u3UVUfT9?5J@℁:Ȱey]= 2c3ÂovwZvZ~Ļ u2»,(-( _qHNZIB'>[L "'Bڠ)جTv!pR(h H+[l9#*CԤN;/W%o-;BL"eU廧'JEaAF!HcQ;M6>iOɲ7!zgR$ Fmƹ?|.i@05*3V)ƨ(~D:1JuYLДϼ(^T8L,R,e21ʢ- [ld USr T<z 5jj91#ûlڒAT **kټ^Y ({ӿaAKA1"o(7&絋*(dg/UiZ͠Vvh'= j (Sh_X>jbYv 1@i :-4X?fZ@= &cPi8hI>j:׾Iұ%Ԝ&b`-kSuy1E"av^[Mӻ7z>kF"26fU E6?U*TYxG/B-CLX٣{<.^;QȡܡsUF?y.OOC~H#@F` =hya,דj<Y[duVQ"}4 "Sth.p]uEj"@y T;#^\y+aRY:-}_UdQ阼&c0I%D 8&eRJ[YR%mŗ/@S`!z1iz)YUAg?-3Zg/m|!_ߦן[W]m-Ž6~m_X-ttqzDn6/7-0TRI$C )N &F s6$G\9Br`PE>9aJ dP9o Q-FԢmCrʵEZas EbYDJ L m|Vl9{)T@} (]ƞFՁ(# XOTR:ksVD 8,#Puddm95 ]C-Zf?f $ 4r&r긘Y=TAv4B;6QvH17^Xh}}1M%W[Uz"'{[T*?fR &Y  YdCm5kM &X V k'@gVPl!32d+:L,kR,m4 s(|HE'Qd6-yywy6 $lEJDX"Q"qgqmj-K>?MmYH% sZ'Xzz`fn0lg \1*J3hRȖty5guVy^"~yyd!:QɁrqXxF8ŝ}Cơ. +tV4da[ѢvA"-C d1`~ZQǡCDC-{ב{վ1Kӫ0>q 1Dя 7.қiEon]Co[W4 [2ں#k_\]|n2ЃNy:2u߂"FǨ4*[EeĎ}ZGe˨rNV(iq؀%DRFZ.=)Pƚ4FePp""_{]KK5K-\ _siw jힺy[5G_Gwѻ9?3Ø|<㑫x'_+{̼6r7=̮w̷@{އc`L IcTb SV9 T4hRpԂ$oNA70Ym s"F[Ʀ{?C&jBgf I[v:\;0it0wH- LAz5{PQR @@dT*jZrlzER."AbJ.)+iwP9E%GQ6>ak Y͸<8nWxz׭t _M{ 7Oؾ|ڈ|.?_/ ]!g#C$YLb\ &I}ĬbGmüK#.˻m-Eև'Q! )X όYkc!$5[8݆Rʊ:/Wwqϻ>.3k L -!i t`Zăۻߣtw7'$Jg 6:+GfF|+gדɱ]{r ڃkFX{9+4J=vzY9F/wBEjx9/Ґ+\sǛոY7/1yduzr)y57/JL޵qd׿2';kJuJdݬY&@0n$I IgA~V@~YN͋)H#"}ֹTC`)ՅT]Ys,h ZjQ(jZ$c朋=1#| A BYiYE:RRdM7戯$E7ʨT61o;/NU|~'onڌ;p #C,nq,uYуhr>Uh %RL3Z`Z[[QkmM=C<'e/>g<X"覿O[ZtB,٣ESJɫbǬg0O뒏zܿ1lOANjO;akF.T EJdw`{4M7?ʾՏ/;~^mv]1pb>PEc;t?咊I/A/N`gN%+՗X9y)(7OvNt-2aOMv|XW;Z Ywh,N[sꢟd3اc;6G5>Ig}C*I9go'ea]Ls_\Άg!` ?s:fg\Xz.GtP?3~Z< V|dQO*r|i3tI.ٟW\ns'LzF|"eV~enUc{=tV;8izPdLREYO'_/U T~;?o'^N?35fk~99u6\1]rn 5^Oxqg;<\qA ~@c\k{=5]Hgo]^<\j__$hgz{ʥJgsJm*܋.fmϨCnl>h7ko2nwڑ;G' %Iव dqY-.3AFD~81_3OS9{/''mm:JZUO|ρy׾ɗTGA}b #zu=K_?{Ip:=*}'>1 hqcu1Sk+p6žh5waX^3nAϱٴ8p^ܝx.WٿL7JN$pI6^>kP(UilъWN}(g[6~(|~Fz6@/x~vίhMSV&_$zrJ6b,F^F+ uK^~ߣƵov iVwktlqฺpjgK0J}Iyӟht_͛^t+q=f&RrF8D7&y?\smUzឍ?8:ru{Rj\PβϒQ-Dd[nܬd3 b&y&]OV=믙_?xYVʉFCb(\ !0I{;`ع:8r6w.\9i_rVIDDJkm147w6$+( p^nܼ.vrsuMvxxCZJA߮glHuMק2][^E$ 繘i50*0g_=~ʫg}ҟ EqCN'wg'1]LF{ڠrǡ4W: 9 + d sfP58+g˹e{l ^IÖB^wM>Wr>I rw Pz9a~hW=_./o姓}.lN/qɷ?|?sP i7' j~<,R?>Zv ;CW.]UGiOW_ ])2a3tᆸ+tl=]u +GV]ؙUGNtQF/! 
]uw:`:tҕ~U;w(~骣瞮 }.U yW誣t@Ix`wf CGx (ٛ/pꝏ;/c;ң 5oe%漷$ w)7I> Ӌq:(u~j=}b62,mu5g /'d9|x~f;k !/{;{C5;6x o<[d4 .㖬aEe**h KwA fQ|vEyf{*s tJ!fS6rdqͧpXPh!ƅV5[;4HlU:Y֙] K(_iD/ >#p軁vZ>ŏRK¦JEݸD $|)>@"'NNEHcs6(G$ժ(B-Z%EYVKJbMSUaɛ%N@]o/Oĭh)L UjEB :ZPK6ʈj(7m[ҒD+% Zm1Zzh wb5rIƲoNi<\dlrStUTZh-pR׈ vh̨f kD34fs#ƦlZ+ 4eʪrQ~h`H>R#w51t*C)K"LAt(#FPU`#s)WT4ZRZ ؼg<3PFzxw(G@ԻsQEڐz~z$^d|MvO2k Q1|}&/9ϛtoUY{US#6E9UJVƠ3HE(wp_gaL>Qҕ5j-Ւ] hHHI_PƠ[AdmtG7&X)ՐR<@*<%!Bb!Y*j\p*"DUh,ړfmG͋l%2$⪼DcR/1k+b'ψ"R*H%n) ;*?[!R]J y;a*i#_&| Es  8rLrSN[1PQB m>諀Bkh :83A%TC:N\zQf$g(dunXՔX4ęd BZCudul.L+(TRTm  E@P6)606&XF6,F*dD@98_wZH|-x*+A<)PN-` .7WjXR&EfCsMHpe $ (T_l4-'LV<8$A`U%h;t!I+Ԝ #q2Mo[@0zLB€("2] HsD.uB!7[S2 :m<£60ڎ BC\9':sD5 R 9nkL66/™Y{S 6řRPHq 4s$eAp5O,)RX6[o`Hn8!{ u9kxQUDI)uͅ[eU7 !QH[ԠDtX(k+}ʩHo iDZUhVF |f`=arnvU qǬQDr|1 (*S^.М^p]azV44/]y*hKFGζ<]`惆i!ВP(*AuPaP\J7 K`#%IK bAhtW!sc ,O`$;FN`QB'3ԒR|$2L&rZ!d^`L_nLG¼$/ŗ@-[{#fH܆6]`JrB#Us05"Vdf!JH7wz&] 6䮬vSqQ wVDO%-,MFU֏Gx|:LIf y3cQzt:莼f%A被:4V(k.E%44Wn|qhJ:f#Gx.V#P5 oنbs`sd1ޗ5]v:-i>"G'G7hi ELK~+/qeZZ?#_WZtғ`姭khx<ˤ]W JF zPm 0d7??8\ceq7&/xv M;=й C;ߜ&7]Lmiiv23+Ok:X pQ\q03a(ZSc5. :n8ɧVK0E.8pn+K+ &r-YvxdTV꣦kTMJ}ռxc@w̥^sgOWٯsr[ʪ͗oQ!#]Bp)\F%曨osJD[#WȍTi7d6a%̠]Si4"/f>=GigyFIojs"G)Rٕn `){Z3اK}VSĈRFormPs5RSPXMt9Eܥ.]+$8(9;q==PRlVrJiKLvLV^ LaP/Jv<]ﻻ)9ۗ讜]VfטCqW( O(^F }H@`'Bqp(fPJC]Ϗ?tu/d){^wu?i3%rϢ+~w]=P`g]CqW(-wPJa]w9~g] m0m̤VõQ8nLj+ŻY(X|Os!-@Zo*:v=;ݧFS2pqb/ Z<]LnbqXɮ2z*$J'W>VnF|wx­\?%/F缽xK9;4}$߷yxg[VKFm#frzr[+e<1SpUrƝ02XjqոfmY{e}XõWfmڮOjˏfZ0~V#kgOmy_|w*0*v2_k \:]{Ά6^֓6 | x%{3;2,j|4߭>TBJUj? ]QzFxxmp|1'=EH{"ykyYH`th"HjfCx#D A"RTCSɅ$u0^F.]>z)|.Yն\W\(1e Kc1* ͤ5r#Ĺ;rl#i kD|GRO8}r>~\ qۗ;2[7X/&,kcޗWwZBE&VJqXjiL-F`5TVՂ(zݣ&|ct2-wk_p/pJ sQU5<7{C*M\t]$>Չ"喜/o+(|RrƮoh ky<9ۣAwAgfG,r=ӓ+Mq% 7N+/!cn/s7k)B`C+Uvst=rAW>.=@YONga:e.A)jD4V9Yk胁o<IX U`E YHlT6T{ p(cۛ8wC&]{G.h2 B9MNwRw)4M{¶0zcwoqhۡᠸ.i1VPgYn)&dnu\CV&dܫ[(4+r+r{Y0IRrbTVXTb20> iYv:CwޛxvRKN(e*ᅃa3iRU;\uXgL,0t{ٳ&Ύv?̬U pӒW^vX[ XGɋ2I, >~-t;gK*CNAZ]A$RDU s%YdBB4QDyx~a8^svE3. )_?Zn,,yrU^$ 0>iZTG/cVGtoqX]t|BeC :pژϳQYɫ]}|||w0]c)hurܚ*SӪ {+TmR\̼e<7#-@GzSVv-!U&q"7,BJ3'C`)9hؚ a4Ȭyr)dS1EfSxN968]Z8NߞzsǐjY@$vfY !j0+̆8u1}j&\m;vAnUc;Ss%5WnbsڧqZNgΖiV;Y8oxM4]$v*|u;hI۳Vw3/Qo!FgTR˴|S1_"t@ʷ7cĬ'wxMOL#881w[\ 㒉sv2٬Ɣ2՟ΰjM>+1H? rܛ ʟ]ݲޡQqWE_< t+~sZ^6wm*3PvjMxjX05;o&3;Y2^xA?f1 ћM j m:I v~khrYnO.'il㓄{ +IVTJ5hrxaPUAsVDOtp*wB pIh+6;!TR}W xCĶֻt+zYՃ&pqd:L[޵6cWLC"?Alv @ciYHjy@,%%2%{15*:U<\9}v뛻E!ƍИx"kla%q#:9wJksB=K6Em((iZ%(#p Ayz#іa]eemmbdD9 chU~k&2L29zB}Jimƛ9ib\l=9Ϝ Bkb"8P QU?EW f}؎Mp/j2wd XmII |49ɭ'߯%0+$f⫊+'۪Ͼ~ Y po SJkz]%Lj^X)92I'ID_Suk,Tu" "g֢lZpQ .3ڃ!P{Ӯ8/35_uǺOg;%mCA%#m>6=7ϿAD"989ƠdVBjmu[g0w8ZM+֑.k څa;%᷃}s&Qoaق)?0j[W;xa'3fqZf#=b(ms? v t$``&3i8xؽ''?!O :\t[z[-*B-IAJBFtD)--r乯 <"=׿^Iѫ/خ0>J'4-]a_ K ]0~|}Ǜ]CM>O_'ީET^"w n;D-r.޽&|4.*ew1fzsӄJo,Uzl~`: 0Z3u2oՄMof(+Y5"bf1 LF-!xO.Mn1d9:90Ue~w\au sGcM d ڨi6*ZAĝ7~`uoduo %{ǩѝ-ǽK@.C1+A~?5܍0Yy3*3δ梑AL`cŭq8i\5wG.zc)dob6)sQy^Ou N*Q>Xɋ±ꙵNɥdo= Hq[@L3 "Gp;+1?UU>&΁!Bv-mC-.eXJ6Q$YJ%A Sd>0P`wad]\Sٿת1{w/c$(|0zbd[7*z#w%.QtUQMs){ϊ2&f'acG"YGt p]&yqjXXM3BU 倅'D_ 3\^lOV_86/4n<~ߜ?̈cNFqdHg"X>2,DTLrݼmˆR9cE^2˝V0N?Cv:.N'm.])WQZ#-"4miV5~GߕZnm~!ۜ Tzl݄ F$5Z&l(S}/V?QuR1پMv73Bs7I"Nޟgp^DP\̘crYDmr[H`"S2Ne?<`}QJ:Jv){9Ķ{]C5 TtN.1rDs3+yPS(gb =~I޹:\No*`֟CL}v0a.di^߄qᚦj i1^"qO ALq*|@˲g}=s:['s$D/'sGmz֒ өKD^Ȕ}H%^qtc";Җ.頔Rjddڄh17t9ʖ8t߻ysG%}u.sL)^$%њwBxtRl-A LX2i/t(]vMn=,v5X`t]BV_ I8UZ4Dk\+6F4j)5\#l8Bnj*SLyӡvC.F?dzwF|{֥5~L*Bsq:Ԅ_ook_1Ur 1'@NA#3HtыDa5IG/,$bgyV@͍1(O^gxBJBHu),%}6#Y!J'M{M?\+D]ܺ8/B7;ݧ8qxd[>oJs^gp'^2ͧc _oz? 
To(ϯkfQrVH" 6'/ >WoOi8ǧ;3O(fZ1T5nkq>!g>v!NJwbx9JD%g ~&:a;l .0fNv&(qAL$+CfIJ8UJ9CLg8N{C!%cজ:leҏ1hyG zYڤm`[qy ¸#7 #ǻd'kH|N&q$Ƞ@2{0잼?DY_m?lcR9{(A zIWږݍYuLBVK5@LU]ЅUƣ7j͘B$m2(#V*pd%NI8YE#H㯇+knCZ9,ڳjϪفn>N/ChOmƃ(2$oŃ4i{h=|i'/Fmp),3du%4eLGeJ˃tE֥he`WfQ"D _V716\I/=8 OUHF ͪوwuA]QFݕ0*+##|M/]F<n?.HL^#hxS-`AMRDj2Psp٧џLBqRy^yZ ܌$@;;.Zw.*!ZG;`-P::u.9ģJzGgg>3 w Ubjqd퀴EV䅽Ǫw-P*O Mf qնbs7vh$V 14?B, 25Cn(Q%aɆC,w pGU\ @..p%ȧ,$,,{-y@{zYX,1f!꼾 rօ{Kg1^ iB+P)e%J,@詨,bALSdw_Qf 7=F`mR8( m7XKY$j`N9vKTGZpt?tU/yӼq7hܵq]wmܵq]wmܵq]wmܵq!sWm4wSnM)~7lM)~7u҅wSnM)~7Nx3)~૦wSnM)~7D#H46wSnM)~7&wSnCSnM)~7wywSnM)~7wSnM)~7wSnM)~7w[iE\wSnM)~7wSn8o0V'+.9mXN|VH63@x.d@`1-UK~I!črGeDA/9c>ٳ엍6V Y' fOdAeC&ZT YEn2wҀ譢jgnl9?cte'ژ&]0ޢ[:džYgx5]BlY(T] #c$ Zkuf>j55A5TMg%%%ѡT)rJKTQb"&S#͈)슲/YLE %X؋V ;bJ -Q_wIRITQ)j-g?Q%@hla/H)_Mxae:)9/}mv&dFeK0lT% ߳֝( jVvc־ZM#Y $%_|o.8$*$SLeyk\o|PISs}4,FeVߖQٗ!ZB`<&eaI„<!LrZH$l/z7@#z9Y$'Sʨ L@*&c쇈u|k/[ؿ\v} -of#1 <>~GWsxC):䔊B 3#ML>IHd^<9 A:Dّ!":Pq>`we V0 @f5*4!(&Á؆ Il\!$$b!d%3na8be0AіK)ajԯ!'` IR1mdp@'F(eb Fl֚B<&.͢P ~=-Gt$ߍ S˓t}uƀPR*;Կ$U|z*I6h!__s{.Zf}iWa1${ͥUUq^6o3Nho1^Ry6㾍nr7g_rJԓ&+ye&^,~'O>?|"umZe+LZVuZkqvnH؎g~:͗al:cryǮ[[$6]> L?Ȭ5?]~T|seO@\*热>Ou\;mv̩2ӗEU= ?sOK z:U+ >'Ek!׮l_|v9Lh'-a/ BL`H)G/#R1Q;-ݵb+PiK!`DA:k-i͸]V;YhHyIO^RgJ'(,?y#c;Oub:ilAN 63uPvp%#`71nhGݱEB՗`ڙ ڗަ>3;AJP)SQ0F!2ʧG_{#Fe A ZPS#4"'^;'^-'*#JEf?-={3ɚ-1cIĄ}9FW#[(bJ`rI&F r%cBZzx7H$BNQAwN@oُWxN6\cYMX|5T{շ';l?|ڈ|'p\HP;)8Ax N 5zʖ\*{)Blۏ.`LW3;Dh6Oٳ[&JB[|BOo"Wb6FŤJtyJ}ܔ 첄A \WZ*ܒ^Dtĩg]q`(_ڼAo{Iag1& J:x'.Alӥ0&*:YpB2pt]>hV=cU֛P}ɣni>ˇ<A/y2yeޱOhb>dG۽8{ >㪻.$jŸY;Yw[{YY鍰Pكm{#ix@fZPE(a&!;b0ŌHZNI90g59j9^02> ŋKpX-ȄRbjvdt} *$cRU?Aa3wQrKj'bU^5}T%o0:i(rU!'$ ^f+Ad$bF3oCH S90c=jo9@f^aiyX?𽍓oׇR]<»uV7fEA?{ב\ʀ_z~TWW %`ocCϘ1QZb{NC!1a33U{:e)B׾~pIiq~V$JcwqRddȥHiV@Oͽ/-$Sb-PG}-F'3~dLEC'Bϊ՜wf<-;l~:gǧ_]0Nj_|'/ئ ."=A~VRI9Sr@dTAT۴Y.b1TcMM>3%M(A) P D2u̹lH'sTPߣ.SRNL,1q $V2ԫ-TCd 2m ;)"(d0CMPV)Ue6HfOT1PATWx89 /Tqx("Ĉ({D#❇;^SPKq?y[T/uWMQ s/Z)m\Y{m kg`_\Y{[%g( L*s cT'EkƧ9Y<7x꾏i!LHjs5bMVr!f(IU9p*:zZ<$0< 6agq7O?NGṣU[NIII!)]]9aY_f:N L;B}ce$|qmӝweW?o>mP p$5_N:?]wLC>}Ȧ6.zzixxPg:rwC4^l٘7\;[+~|yr7V $ΗCݬ(fFN?>!N.Mm{dٺ8{Q߾~G2ycfy&ٗӹEGM7n?S$vY^L6%U[k&>c &?&bYGksu61ZC,meV!7g SajI=jӸH?.ۄ}v8\ybN["06{d||IN^N̯f'\ٞ58P.gyYaW3?@>F Z9ǚ^ ].\0Ћ՝Gf{r]Sl;^lWzCo_-|e? rpǐjӻ 8~oM Z]rHSW9xqpoG_ N,avE[-JYs3\EǓQgۯg=;o R&+3{~~{ma xMߕ5 bO39pfYv}={FF]O6U3nʳW㓷?g#lw%`χb2ЂgI1w#?h)Yު\w SO|otIG*Ct 1?Q/&M ɖ/W|(lX}?G.cRv{zq {Na7S\W)bti3LDphrR~2}V|i0ׯf+|u? !N׽/ޘølg-L;4+67MQaζ ?׼X[B׾d_8[`~?o(E-?<`mnze}$_п߽}8k^ ql PKE9ϱ߆2粝 ^C|4BE J5; VUzkW]9-__?~h͍w*{߉,CuOg?m<+ܻ3ڢ.U7o؁j~7G ngs60ßo./{tub@b'fokoƿX_<ָZJlR]%[{7YGg_3u5`RUp4hX>! @reݎ^<|@xX|=^?5Jyd>HbhݱQ燷HFC$GkV5-bJU'(̮DYGKxKdhx'p&Gw#GOZ~} j[,G'~0"uV K*-yM^5ŋ;Q9KHı٘}I1jU kPlEYVcJbMhj^sep0rZ쇅,Rצ O7hbSIՒ1Hpz#SZ;"Z ׊ Үܟ=?֜%˾9TLs&kgS]=hUZIERL(̘f kD38fs|cS6w6'8PS}_{ DF0S W=9j.mC2JTᚷ/i+K"LAt(Na0 ͐1Hr6ķcf(F#L]ʹ=˟ه!+6Ye`<%C%% ^ L gU^;4cm9 S$G]0۪ 1Z)x:rdJxK Պ.Ks4- "H|p_2UtVnmG͠ m@}Kw U'eSUߨ }ԀXE9Ux¶BM fB(NAk}t/ ;AUE%e--`$d__b=bP"Ǡށ]jSpcփ^Ls  Qρ%_>$ivK]JS v0aJr@Q u@k^/B]%KIprFQf,6- b0 A/ʀGqح7 t& IBi"(ԟG=\@ X!oؙ Ww adR&tSQA>&qH516ȉ|j%;?zNYvNj$k4,Nus6R*v[QTmXP@ JX 6\P_۬Xݫ!Wjqh91yKFӣy.>nӞ-N%rW&K;7P7ל5̂Ges aJ`a]˿;ώ*'2YǨ][c6^kMޯHٓ盅xPב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^Gzuב^e֐S׻ӡ-^du9GOC+-qKN9EI $BI $BI $BI $BI $BI $BI $BI $BI $BI $BI $BIwL?!}2$4,TH hI h IHq=3@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H_HpS"tH h.'CAk=zZ@RZ$@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! 
$@zZ*z>>:,宻\?Kx;i}/n!zm]Hi`BF0 h} femzWɘ.2dIIyFe>)7 ?,jgO(~(|aٽtSCmLf4Si*M뜸eqӏK֤]4Š?$tRR-3ى?G>-27~\ralg.%va4Zvs ß)n|@oߖzV;3+Y֋^BWzeVߌ`4beR̆A~:*„ %D&R"҉@iTc]oNlݾqsQ2CWQveo?50ُi_c\w?]~gћcd״_GW '7\\gN(-xfI7?^qȢ-/ĚF:e:K88Yw+K4v<ZbɖQe@/Q_?9`*Ӂ/[91*yQPp+ L&"$$8栉HeQW'T_wBXxLM:I{=I+_ga%^CU?iv'Yo3sK׮kzt}kNb*֩讶l/}OQ~=g?w㄁ީ>17)4.%҆FʋI0?uY4ΘTl|2nG>j2Ô֣sL;N8 yr)!L 41Uiv/|?oN˕ʹ&4D08$#^d̪DaGs1kiJEQS9Ohb1>3ج; 'vC{mð^=<+SW7>?oS D(iEln~hdu_ aƶDYmquU[e+#F5Uѕ 0:49Kh LT $ cx9"(DC8@y #Ǽ@ ǵ@{{R_92-uv%F̌J+)3crBƉ>)މ@lEUX#r֜ޅ GϯFhܾ8ͺ4LbQYw1(%E`:XB,YSRi 2)i .0&IǖtNYO:)A0⭖RVkOz .4&%hTבI3 S^QB92@a/$;6ݚ1F~j_ys(mr3?;0 ?iV8F~&m}OiKͷy* .}:,k&l:X͜oLERIb2oPN59(eW(gmӕܝS6c" ^!@H.RZRV^{8eN|m\u~^urwFоߪ}|Lm1ΚYS~wk?ޚ=cD?g'vʻ^L2:qZp۳5G a"e&Xٖm{[kir'eWZ;'!ZV0j+`"*hI!ZrQYz1(O^[χ?1=r ":\Y a&p >S&ٲIYtj6Mn$fF9=gR`֖7]ByIu_t۸<xb05t"Xհ-w5Ri=̿>z<ͼ6f^ Oʖ\)h3Ծ: T2!1ಈBpԻ\U8֜z/tLx9LZlGoi2'%xwky_2VH fFDA+I[>ZRKvɶL.h QՕc74Rej8 OsXm#2X^p<')'z9g-'B(c]YTz-j)MK%: N-HƓ KdMHE%]9;zG$g^,Hr?ks+_[ e1,nWϯ-{ "2TOw0J"tI\x~R}ؔ*aiѶI gc9SV)OVg嵓ek@:*I)8bb*6Q˕}u<(KzLiUʅ-E+GH]:ﻡ|ʠWg-EvTCN*)NSiUb%D֛4͹lKK5GϭܷͬgQrN^h}%G5FFPYcʎV3G~Ҍ3U6")Q#gJl W!, &Q'-g՚_~~^N# fK fav,5)#(oٔ=FWiRn D tߊ:76zE:όKrmu9̋_<+N"JjR+L'at"HBe?Fc-3XH% NL5iO$ܶ-g*J3N(0uF 0Wڸ j!bλ..징tk=p=< [՛ْ*1xSӫ7nsg~z=h٢Ny:eDYElN!nߨR}ss<ꉝ񨱴wsQ圬<* ]Fm,JK2%2z"%ƒͱcbxugŮevO׋UaAڬ.}BlgoxKxKr[￶nZxU 6w>yܲûx|xraCi$)y+{P5HRh@*fDLOrQ6=aEaLIS."Abക\"R0(VwPsJGpУ5yt]u 8pSxr=CqyvǧF3oJ+HP;I^q !1$4/=S̚}qP6ʻ@o_2a^͜o^oh0$(v'QːsE]Pk׌Ykc!KBލk6ql ݧu\=2= =Oǥ 4Z?5 Ҽ>L3sr=^Xm{(]_>ؿgN:+`@`}[Om8\\zk @nA{p ^20DJ笴Azs4%d ^ZI4'~E|V>#S6^?)=/ih~ s½yR=/uF 4,_}@tW3$P|m`Fnw_N\v]. ǣs~:-~Xl`5~.n<扪wQSh6ۺDu38>z@ZGl {o^pT' q7F*nz]^qHF:v?}dXjM tm^)ΩZtZ#3 "O_ps'9j,caޞMFyNN($"bV%z,2XZ$x<`|\bi.2xT9{ {I e3Z>dp aoPŧ#ne6[7Kyn2pg^6#Mdzv7v"N2:3u DD1]N[OG{l<%#Dz}ը8ڲqWerHy(-%ꔥ*^b@rB3tygg$cM+r ziztXYkC΄""u09"3{E7*焊(rZ D{uL&:;_~W2ՂÈ{Nt$]@%V^LϣDDkbLQ̈́Roo=1ζ168$~}*2مoX׀W<5\a_?~%koտ~ϯ@[/4;`u lWTQMI !}7P|J֥WP}-7eolj߾:?pR/Zð/U|m@qXQ"m-.Kf2L LJ:Uԟwȱ|[% U2SJh<;N3 ΍bWG%Dhהu'rL jb N'%ɬ06:h@wKg.Նi[lr^2u 'ih9}絏Nr6_%`vK]/7>EqAI!e.&(cF'0Lxh(EP@jfۻ4xгdiCФSȺJx'/cTx}D&1JuYxhg^;RItX3XMA" Yt(L͙,bȼt;XϚ @=o}R3:zSlwCPk'ХCTPErU+"r4jKև+$ZJ-Uk>N?]  dqQU]u3*DV3.4\LT)Ƴ);D"@b(3>!"JCLӂ6hb8zu.L&^ G,ӹ$LYT,`Y -,c\ʺx/ }zDzqJDU>X>]ldTE*PYxG i#C0,4.# uqr7.K$X0|fual~Gܤ7Y @zY-]^Ut^Uml &ɳ|V+Mw|c˖:@thhBs8QB38lC䘥I# `}+Mh)A1Jd`bXiAJ&~pZz2S/d!H5*m5cWVZyg[eGGm2PdYQHUU 2w5A-D &PfGx)拯:ۿl/"%U E6tB:m5Cydq^ 8(y\GĆ@/7Xd$$S=IE䕑4W%B]( "8,MO4H*)#&'RGFC=pňEA,=cJF?{Vg9,`̼,i "hx5%8I-fɱdYl9}Q|xxǯŪzrCD:Ls%dȘmIf؎P3 `:bHHcJ&C[=;6?V? g˹+V^d?0iy~85j,J7^l8ղZ-ćjZqբՏca`(~8M<㗏;XBHx_an6g|4exm6_s/Tלm7]R$,^ElV,:Q[h J?HʮdǭDy>| ifm5O-4^'`_d2|/O=Y xDmVK8֭GFN\][XU-]漦^V:#rJ! )dV?IZk~̓Toam5nINjc&YNBMsVCEK/Ef>U:J6Y5%q<㿴׆:։U\qAbsYۃp15yMnx4`4䛻*OLTXuzLMvEϛX@Iȫ+0jc,ċKx2B ǫOAf/>%e%yz@Gf{gIlv>ala5&W$Fd]2;Od}&ԋ8k&?)Fw;ӛh$;;[61_&n^ #~/<8u77tNuuHv 6$DAˣ}Gn oG08iIh9262dM@CR,(@K I`ȀJSr|$s庻VR 2ɖ"w2ͫ6[JP͚<9zB[oov|s>j|/y.hӋ`O}ވ8P7> P;|blbɮBl- nadΙR3NͤťfI\Dj&K"OhǀƂ*D:B"#1\ ڦ%%HYd:.{S)xJ<( g>RdTiL5y*K"Nc(uHOu]!Wi?{mfO"Ѩ?l%0 Cbz6|6ΝT3(mw?wjAbF9a\9 !-^ľ ymy5Z2%ǘ^x h9Vj FŬI/I42֝m07:HOP L~C^LnkrIC"oF㩪LZJc?yL{.+G,|D.z2`~.5oHO;~3b6f?^}; DԾNuEGN2`0 ߑ&QͽFT/ӵB6dc62 >kJ~^m]MV_~Aoۥ4UIljrT:1Nb).9QQɡ{gUI{KN8s/x{emSgJ M; i[ 'y) zsJl˟W:if\ob0oDzgms#VCp>z̯xgCꘐA1b7ލJǚH.H]=EdK gz?{ty.)9/|(-ƿL,9Y珪7..Sm-;6KiOވRilNn??]H5<8X] UJ$CS$kCǷUe"\o VyA0!D]&_F?ʈ-SɆv(gx2Zd33,%I`"# X#De t,}z~3n<~HB.qɵ}jj~(|%XW<歒a:o}>#G9LUTB]tߎu8[>oq#qO\yv˼ZyT*Bsu7ͽl'c*t76k^Y/uءӥINR ^W?S so[I4?TZnJ!vwo q] T2(-ꂹ(pr(*YPPT…Pq}ë~Yqi#*W]r]^6nbB4F%^IRxׁ@ QER 9~I޹8__U:BeY?Ǩ,m¼ #gHx O!Y宋&n`%>#킉E( DJd AoKE\DT=$I>Y4:Hw@cNg!5k(%! 
{>kCz?c&opo\Ɪm\*[TcVpS=wx(C- Ϝ\42sL$"+d%QD2963oRO>BOxQ8e,WdtI@dQ)LKt!RrRR/W3Z!()m١:fJ)j)::b {r)CZ9,ڳ*Ϫؑn>J_g;Զ7n<;@UxL_1L'mW|L}`\d;`1y#JiR!);||=\D&ٔ( $6SUg:maH_dߍS8G7Y ºYE8NnVRİQtSK.K%j` l0! FP-bZ^ peF~XF6JS2t4հԁȴi&Ӌ:mRE42xg8]PA :nR7 Q"z6 )Obډ22xCAE|(ɾFIJfF'o)SaAlWܴX tx \ B1Ͽ/Ĭu^Nb>\XMp1+" :%[C4pތP^ʨ u7SN=dl4̷#[2OQǫ"^6f48z Ŵ-3(TNj0nW:'㪸VR`^fDhbc f[ALrwZIH"9Cs۰δƊKh#ڹ*˳gk{CG,; xq-& d˕ Qf#Hjj$҈+/6ٿssLcf-eL$TyG B`zrO9lTV  jG% t2?}ƞ WhHM٢]xg"9J))~jT9ec`n>)zoԌ8WgFU^mX4(|ƣphC {Ŕ9 Ƙclr= GZhxkTyEǸ/{qyБ֤IZ"90/i0R+lz;rI|T*P+5@?a>ne"qUagdb>yW<;7f塦WhZs YyQq31[N*n4gQw @z2byU\~߾jTT u'{ܓCcgmI 9Zr;AfẢCCq,N78Ŝ9s˭C:F SRw|)7j58C$H=Vmh،B.$~LI uIJr ~zΤ͢甉h̸W=:rwէ..ך"x.)! yM +ĩwBitF9\[WY%N轊/9w+pWY=*:z̔Hb G`$K$eODYH+/TR+q@^t.m%GVf: >fDd/+{l['zoQp UJxrAΕe.s9痳F&i8/1F۠LnL )&]D!WJl:i+b 6aNZR"쐈Bi$y0B1PNHNeK8UA=a%W5Gks4(&N評6|-RXśj Gn}ۉ!x`g٭EUu4 QԺu+UHZ|d`y BU)[ F,X*1 RY^tvBIe41s1!SK\*EACS,806 n*mT+=h͍M;ts>77!^>xѶ&0'脣) 0T$CIǣSvC)N/Sc,6ʌ ;Q1,%! jCen 9xd<źwwϖ؇ʍů<˧,-=>r/nNʓ(pBO!扫I"[^ϣo7!>J&]egMrW5/+*ܖ[Li&S$[&d؎"AZͲK *WE{ 6WN_RH,IeW.^SN *ZZ"t&?͒D)Fѫ8- JijD)|VvPUygZ(E^*y/EV{e?{]fpZ0z.V.1[[VZo{zh_f(m(habp|YȻ.zvϮ u"^-=†LrB~_k뗔$8LSB 9嘝ݯ F0]o$.Ň.ik+D ee$/4XA/[q^ [-jgRޏ#z e 4,ϺWxΛWn^\S};K0u>6wzm>t.n{Xk|x]ݻ[s}̮o&fz/ƽ{T|#,jUw§H_[XP/Eɮ&,?1yW۽M24 lKX:Qhe= E <a ).}%f,hBj@g4I'՜Kֽ)M]P}y0ҁLV|ȚHX@H`iik11Zn`rN]Q'ѸdҎ[avMZZÎywF9pggK8Ѵ(I IwQ$MZ+MRݭ*eZ^܌ =Vy iz?;/~.~a~'k7کdg,*<Zx8} iwҗ@#P ,kB o~їghiTc$ؒBvk)L[v+]ھI ^-߆I܂VlHXQwr1+z"KKp{Z,t cJC>`R=Q 3.]y183hADGX$-*b&n5)XAx2 0b;Z Fm^MZ=cqk 8'儴l '`,V(pK /BN#xƓٕZIMP58C n{2ˠ+ )+&aJ:H퐋i+0ZY' 'ϞUZ81p.UGa$O4O2B(5׷'؅脘`ֻE 6?01R;Em{v0Hd/!W/ؽ>_1~hN.ʯIdiw '$U57Cfslɍ}&B5 1F$ bY2pn`ZزBo\Jp'!AWWi32Ç_{kz/8Ȧi*ܪƺ.%ӳ,^Ɲhڳhԅ^v%C|3\%u3|}7`~Jf]a"W&bI+*X9OE`xu Ռp`W=tPD{ T_jL~3̦G)-L:2 ՝j;nah̻c9lޓ%7v07٠j n`oƼĤ%i"ݙ행wՄi܄DWݕW=8<ۮxt"ҕm4sJwnEqZdP2ˍ6 #Hjjd0wWNO,T}~-CӮ #fS,0h)#{IBr٨@F Pz]OFlkWػŇ׈=wiv˪g\bQJ50DJ` sKbѲdyp` "{Z,'/ 53:6GR̭5^ >w/or_o#qvh{7 j` joY/&UgKv;'@ )8f_Th+KzZ>gO\Sg6*j֍}R1) gVLV&AEGV &#At)Ф,ϡ,ˆc:ƺ۾?Ҍ;99K`-ʚE]M2X gU*qP˔O k*VTSl 0Nɷ8*DwOxf ̽R` $y Jyh1 xC`A18yL%(ZHhPc?ʶ9m9sJ'E|P qN j<]!mIw5֡ʵ ҈Z lk9NXΏdn}Ev[}Vw4'tJ4}AeH4t(F{;D+" 2*qA gaJYЍ11"8t5V16DcJdc&*r*XH%SP̈́<{vhd{S[N=6 't<^VI|peے6@M2&Jbժ-Z,"[TJWU`\,}SS+"e)\,/{5 !>_ kDEjRY a gz8==֕ެ~M=+~y0KcWN)W]1vJy{|4WǓX~+EǘIq(9 10ɑ.Brb88ǵsF|;KH@^1(q`6"09ASom8CfCǔeIPqR#hD N[˻/Ԝ IJ9 0]BCv1Q8evufV#8ȱ@T]EmsJɺ,X,(5p!PeUXUN<Ebn*`,d*zPR'E\wpg+cLjSMA{5˶ŭƆɅ}ɢD2˞ @:hu!z/ZѤ=U"vĐr )exUdCѶPe\ɲ|JV juZdIOz#;JPi#֊,_>0dIirU ȹB@ (!%ip2] Ǩvm"0~.ʫCtچjSއGTR_nL hf]UvJIEDdl2I2˱(_7Wmh-~X׀-fRlTrfm^QzS-ױhG7 W;vX6ǚҷS)ۖKh')䕒M穾g.χ7)0ʃBWlyQ|p[YjBJMQ 2J 9J^c3[4Nhx%GP]iW**M:ec,ĨnKZ^KJ9坙"K4T\Ƴ lu3UCTlsʱPs#g \Oe. ,VHOlam*-IG 4?n]1pu׳veR/\~^Pq(|vz,ſh@_}f<ϳ嫌lvxFU:S@ھAj}35qƵHzv{VvlO[],^]1^uX8~v1o{Z娜NfRˢcENI-8z7e6..KPx'[oH -)@{V-%3UL9"逗)|=XOtPNpylۓ*:JT\~FtIXk{:ЕTry)ef@__🧜Efy%lm.X=|{wȜ,`=Kij__Z80<|ǛN,Th@m[45@kr"=[g"KyۅNCIkڠrG@R$xϻ$TmRjRmu}5h6IQ8F+z 9[gO^ofԱf0u涒btUh˾e;H *9Xߑ/!GR;SSGqM&~}?F䫛n߮N+毴b->Y󆷗s99~Q!C 4VX!r4h"%[ŧb_ZȖ>5$%r:}V/ J `+d+"ikWp$xYl"nUBF[*l:xMזoNڻl< n[1BAtK̳rW^N/5R㻝}^ 0Pxeۊ|p57Ͽ|_Z7Uh[ F'<ōAq[&^O2!K(Kvz=(on}|e d.Wm &d-#Yc Ih,f1UjU2 v#b|>nWRv?߿8OjA ^Gn9XQ`5 `}; ^8۾e4ڹ ~[` ypX`u`bd XMl}&غB_JϫVVڝwٗ ;\ûݭJ[^\NϷ{. 
֟~x~}DvZuH53J].k7S{J2Bߖ=8bj `l &mTTS`ƒlCb{ݖ䍋%9# !V`FDa^K-ƤyoGQ+:^pk7hx J<`&iorDUOkOJaֶny4X#*e1Q>\ArW: RM!;g (ݨ]%'k*;p+bǕoG+q!N?Wekz}{oO҈|i>/DBmVv6qbΉRm)uag}+nVxG<`4J$bo;o^ߴ˦X Ym\4\&0tcz(I!b Chʔp]DG=̾ok|7t-u./bʌ+6pƐ!ZU=8Ovcϖ][El䯋ZPٗe>;">w_,׶O{;cԯbmVcOo zY|iv.A;َ7w_elEZZT@x:{+A=2A|Gsy*5g]:`8_HO3@b*Ґ7%`ޏnyÍTWtdk EiƖD.B&*W=Q\ G'Tj;o /'T荘ٽtT|[|ڇɐǗz9,(0+=0Ĝ1aN+' v2>ޜ *κm[8-T"ڛXIi!D65 ?j6+\0,&рRA7W.c*EIP91GOpTh*!,ZS/^|UknrW kmG늼(r*_FdP޵ٛtyJ-~܈o]yc{4lL>TBA?{VK 6f vY4a;7ێ3@-ʒ|('K]* i*<#FwpG!HJF$TcT-j#v˟r d.Y#`*5ZWR%c<-ane* ,t,|VYQUCkOV=Gle*ltR(;kZ] E.7kJ!Łա"iet_P&[|JbI7 k8[;.QlX0>:QɎrv.r1rqĪj03!jNl[ hb+zp!j"d S&"8r9ޜ]+A&ADPK;<,:r{ ]I_)1xxtS0G*S]Sד S7w-Yqy4ONӔg;?7NWouʤfo/_rv;w}h6QqMY`𙭎Ԍ 6W--J SQ.zS f_Th+K|`RWusX}ooo|)wxh`td}bέ s1 Z7 VFkjP,8㴉 ;ƀ#<LKjo]{4fQ)%E)A ffaW+*v_D qe#ZALa8Naw\^kl|pB ϗX>U ipG>K!7y+dc}[94IaEA⌊P5%L7h5\lه_y8W^ }=l `4LhD֚X]yi_ēWdw;yZ8WBGz}NǢM7 >exU[9cEQQ)0 R!pJ\LSۢ}VHBgm8[p1=/Q4b=l B[X/偯. }IO/w$jd!ZjR:d J|\]W~NAM,VfC`\bϡF1ȪOšв0UMwnyY\i1|S`6uzb,_DzlH/#77;v¦sS1d;>>>ܷW'u!B^k]Fš("I Ng ℾfBĵsD{"]RFnjD_-bd Ɛ) d5w  g"C) ˑ藓*#_o}E8=yw/)/ /&)EKRi gMI;k cp1QՐȹ8wX$}{嬮BѾdaDeA@9FζLb^+Y' |^!ӯL(<ؐkDh?4MVI}W@@ş,`W 6q@'_2=c@pYU2d˃Z q`-\ W6)ҳ0[}Z5ғ!_OwENfzWa2+u5'ύۤ/gq[n/.[زBJg{(J,Tͮ_5Mhm?L,O~-edw ,9~R/ycS^VS;$|4pWNN˪ټ{{xur:̻YC׼/GrrI6ZEOuok Z&fʟN~>ktϧ wù=fin h/Uq(m9PGVh;lV>K)iϖzTA5'J"gtj-WYz/}crvy]M~zsB<'重|.x.-F<5ڢՂZQj/g=Y:v^g1ump9}{ZNeړBc&0->o0D~yH<4*}`֝ulrʑ.}\r![l.dʡsI$( Bb2`Di ^F'ZnkB̖ ۨ{QӢ[p9)[Mgpm8ORhr!ݠ)m0ʼآ3Yά!qgq%8BW>x?Xp.ۉrp7ٴ)AhiUCF|Łʏxya"!wZgi,`. Q5zPNu$]b\*JX_SW(**G]tڥ/ƺ!fky=2>Fv݆EYkGpA_W'jܚ=ǛGyvrÿmfc Vf]s]6U:ۿFթ<^-[VYMWq<+0wuٕ$AGLu}ֿWǭ{[q}d &N.'Wgs`}&[2?/W\[_^ZkΉ[!;wV$-w *kwC{OeA9- k2AY{4.i68r%; km$Eȧd|506,2w|Yl0VZ<*=,dvIeڏvQ,xyx.:䝛[} =!2k9B;0oHa[0wd)FlTip/v:r{7 q3iUʳi<ȡ9N܃UӑԳI\uL/TNJ <S]OqǤa~Dy>T3q<3G-ۦA5!5V*QO /g?W~l"?O7nyqy x$q~b7E_gwuS2㌟5O%-Ww"E-"T'r™*1G>|%7gz6.EZ[XGZo'(U@-a/ΡFV'G*5"V5퉢a^}k,iw1xs, ^?6 mjmIP&s*FVƆg3X>Bvˑ|_\ HBGw rl~~>O;_ŽLSQS/u2⽾,_d|ں^DB_|i*{^6]~zoC8LQ^q޷{@GY˵pAb"&G;c$:4I`*Dfv;޸\yrvjc3۹(璘S[lXvReu5m #J1^@hCpǭ(atP9CJ=yBJ)a *e.Y}r $͉T.%cY9`#p tU1HL451#X %QMnak,ҵXsz mϙR:&F2$4g\DAb́f.<,K,-9QTT HG  1G(N]9҉"Ś#ռ 4s"Z;zV鐋AqYq Z̿6kG`~G÷;!䢑)w&(ƋĻkze㢨_7w^.'28鴠Zfs5'-REQjs)9G@j 'Jd 6$R*&X9W),P,P XxR,\(mqqhpyy?sLI*;>OƯ!Cd Qa)8'HdN!QQx'ǐ=XN 8˰ɥ *oGI L#(Ji]9ͦa<PVQ[ =0؝a+x*AjdZ6R3;ޫZ&GHp\JTtIHe}&E22C E{bMKcu!3TTGema<,֜x8+sAbq("ˆDqgf"9 Y#@挈IU{EϹ)XnnHך"opjTl]8*I7(|ftRq:P;źj'9uqq3,|\4.sw_Y(CҀ`QN $,AGs340'e$ƚOgu,= Oa4Y֋xӬ{wksNB{!ԏo~}b2GI+n P9YB8~9Fe'v"Am)/sdZə $P+.V.GH*XX j99|$F/w_6uf.{nWUٴ:,[U6UZ?_>eםKWX.]-KSn'c2vfS!j <=j(َVWws% ٚm{*n>u8΅i*׷#We^ӭr~ߔxnoFT̔pc5//C<2g+EɪJtܩ`HέU?gbĹ{Ta6ձs-q#YcVo OI !l“%{8]_gr堐}Icg9? IM "!*ǣ(E(rt½% 7 Xdqqw{./A//rlW=ϸGuN9]oao;3.W:ûo*!?(to7r 12Y v`ZoĴf|ڊ0EflܔH'B`)* +4bLчT:Whѩg,g8͜*!k14:r 7zvQM*SyV6x2΅HoI@e$SG'/"'?|G6r4BQbzuf}}]b%It4U`@ DoRt@6g拷V'&So=gP\S-֖+PZԐt5?mYGci25L[CDbx}ɴ=~}`ܼ5bUn%󱯫h_p4Hj9E0De<+5 1ȓlWdd=7Fw?U0+ t!!㈎y*큐C FHf}Y>LɺM-:l_Q8d2Q/"E=ՙ,ėJxmU6r7$GV+rH|29/%Jt0.1mI). 0r?|eq>|eV7=NC=]ZG>+c44dof6関]0l)j,esңYի>\;2;Z%묲g+gn[l]fT qh%}m etu'clxvpeXg=e_Ξ? 
7lc?tj涾J錶DzED|\tBCsWLPMNjrvx05һ7u97{$n.=]||ߢ~v[Mj㖮mv<^M{D]WeB֘)E46&lv'k*^l;eW6܈;Î͡;6[iܕxlĤdCᅣL*j k,Pd(íʹv^lgߞRsޡ#NDPN%Q*1DPe\J"iψKIbTATtUUbqhb(kyc;>QG' '4t'mvbR-%u3YS q b^ʧ0$z}?>ѩi}jĹ>艝Am)ۣQ0,G&49!@#ed$xV&O\x`ÙS<#e}/M_SΜ;*"PŞ"RbtP#~@k )I[Jd]cղo`26KNt.w-ʞ@!wbx7C;soEh%c^:>}gg㎑EA?{ eJͳt&??pGn4ٲ!>}kABy:[\\.[=Lv@|@KRxӝ^~y5ц_{;{S:Bzz/GRtmvpyM/w" x=So7 㖪'RQE\ 쬼{`>ޏ8|8y986/<4Jydr4/;~Zɱ%C5.9ؚ;YsV3HLa9$E'Kx+q曶=[n}7=o9@qy4]gZ/Nks)I\#P"W9&gM2eN',i"]] t=fMNth1{!ԩXRUqs5Z3Dj^UNaj3`ɣnb9&U̒͵`2&j6j]^u9EIk1hPG.˷ov|wJƲNi:J&TvfLc75ccs1 DG B%W'pGk83E2Z/-:1tiuoXxS7 P&sT!(DCnMqw kd3<#KƜ)n`O/Scϛ&UE{r'65ԺwN#ϩRRmm 3#s2J'pߨ#1̙KԮctS$Zt%GYG7( cm "&LH/*MYA-AXŹ xDmT/  eqmE jT&E]0dTWܔO k,Z!rSȘg"ũN]0ViPkCuYw H82ESG rNcgdjb {VZalǶM# EenD)QnN\PbIb1 qv[]%O@\AY@\ZB44GV>L\s Ը!/ +Ae% ic `Ʈ@ +4pob8S %a7i,XhYGGRP6ZO!AO)zc݊]qF)^嬅WgE]%I[F8lYD^n!G5M#@H/eJFie":8(5s Peo eD\ZU53&M{ܠ˕ukᗪ8V:E' 7cB*We@; B g]9Lfz {[Q] lCDB$a1-7F G^ʓ@ d)< ]I\{H *#``> @6c&ad8m,yѼy`4 O×Md :Ynm[q;*82ϊdP?Q "|qT# l縵BpߩicM0vnzwy$di*9rѡ2,K 0"1w@ڧ=h/f9 (Tʗ]H_>1#L϶^H0na,=kpD-hg[R;V@bz| ⊯hwX-D7+ft aromj39pؐ `$(Y3Xc~XTי5IqCQc4s  V; ;SZ5cfn~Iu2%VKkq M;ct֑pi`dFj3+io R)J]譪#X}X@HHX6 f{lzT 0LCFi.q6c,WvӷK].N.Vi msy04 jԝw@7ם5FOʖ=Ձ(4ߝTev ÚZS@ΓFak7nS^%lg'c  A x "w@j]~>Br%5p ( $SbFvHzDC@}l0ց[v#sq zV"šj)5'7x;8Yy/iURO(cDi)7rQB3MW=uslUpqu@My19w[DnV4H5k*mKcl׉ d, *8rOH6'GFw *Mf,HN!APW Ũsp$k6VP8kժae¥f@ D2\Hf`J$nÌ'9>X2'Y{Snw!8gSMXi\0_%TǬ<ҬdS((`jI*\ k'.xb!wT{d9RT1~5|ח^ܼ\^weozarL,P ;>t~2Rȅ‚*EF)XPhNqA챭ty?;&Z! dKǤ&@0QEx@ro5Q}=J E+J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%WrX>3`0(6hX+`%(F%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@_hPfuLJ x@ hгWʽcD )%eD $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@z@>cRy#R$F UܳWJ9TET%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QQ}Z^Z//N~~7ZM]~s7pߥzQW.o(/ ?į5aiCexe=}w "EyY;vZrrmDL`:Hj 2Ǝw7Uϗm{i]kk+g¢]e7]*̙~OׯiE.o۫ItD^7cy):W#+C2 XVDY j~kLw_؜c^6bu#%.%b:ZQ3:?_z8a~wyo^-Ϯ޶?k;tf=7%eₑX^~:o/^t0&~_6_~WhM&Mb;CST,"5 s, shfxq]`yN,R!bAA`AotQl5P*ȥ3Nl x3b -đ%X>G9MNK%K߳}wj@?S/e_}6wCdM..]vCvevQyK3HWo.~օD|XG#>|aΪ;0w=)N|aB^5Ʋp/}9;EL!,b"y1vus%!+hXҺs運>\}aKBqӖz!3=ִ{!*Z@(N3*S׾g"7a[mE?{H1?GpprZ4lÖm=N2GR̪/"#HjW8RTaP uw/av ޚƣgei>|yEB[Jir KrV\\2HY!rDUUc̲ȱp_DJv!(cWlxi= ;bjfef5sZBED8ɈJI-IDfѺ:^p-(EU SQ ~=M&k_is7:o /gNƻ%T)Oݍ5o ,dLR^9NIc]PL Zf6|A DJ2*[ rF_ V9s" RR-K\_G#>J!%8Eah˗;EUADƬVtHߢb%=Hj\_#sܫ[B$^sinH4׳ˤJ!Msb&\,9z{SWճTdpԳV~뒚Ԭ>A˝91-yQtC*1D,J1(&geJ W*pPaY r/}  J RJTsE %,PԊQ'~9QR4){4rcp>z (Y\bD7-(*c7Ӯ"&];D0>f̱|:ҁ"c᎘ƽӪ{mb\ h&^X^EDpl7n|(_59UTe[o -31Peb7 Y!MtV%`ucƚ8wE;Iȩڡ+k=Ao洰F]厰I6iVM6Hm埬d7J6ӓdO(Dxn>vwiҲO:^JsMA'9(tbmnpaH 5y )iw>e?-;L'"tˡ1!ʳW"FBQ&%^%"Wc{Βzukڳpu!.f*_/+Q 1!M ,AÑ6V@R d*6Ⴃb/"=Ad"7X&CfcZqSEAɃ G:\^bLIKŜ"USBv& 7"f[p I"$S1Y&y)dFmLGM'Ucב]`:Am?~Sl8%*Ó廃yzi {B yɰ!؂nMEJR1Wl"JȈ>GV v*aL {U ~IS>}-:gN 3WR+jPћFaA4hl;gayҾJ<{k*ifT1?mdMC,q-N $ &jR!^D!LkmWEnK1 A`sL㪍߈-ETGՆs=B]vAS;f9]ɇM=卉+GJx'YVFv̇쾭pLqofqDUN <󐸎>r)<pxhEHdd  爪"K!W#͎Z̏$(͑4_i]ݐ5nWvufY1;U-bg m=_mngjآD8f\2PLX+{z{oFhL cT+cʆSGLƭ52fXLxԭ*Ĕ$ }V m>A@a)JQ@qvՆsMMn;5||2! Mq6.7~k7_oWCBZF 9 \q-Nb| B[I-^zQztl\$[zcVQ-U|7Xhl j|2kϦ?^,R%*훫\UpttiM4*2|SZD;gb; zz d6惻fܮe+q4ߝywں5F} z)xNs0$gfȻ 51dR <2RF.1Q)8G%j=8ȥ})q ח<)fZ+򀨋w_xvd@G%`{F1w=yu<Hsw|HS}!9w0\la]#^X<)tw5k{ΓcAvmafz@C8|#ҏKQ9V1;b;A=?_(; jx@ꘐ&BTTb6 H=gchdMtN^(=1ޓwUG@u瑩g>3@W '\jřu6e}';|Tat7zn.=zH:r Q7yP fN7IՄSCq[1yr[vnmmmB :`9*N2cC)1 (sBz0<Hza!TMW3lt9%s2m$%PʛՆs$^ݥW?Aa I1Y$j4EAQee:k31lp'BN,D$4nA8A (>f6֜JRbVFWNqt 2++jQj֞ICXgTPf*kQMNN"pF%Oĭu^w0i}D\; Mu=Q~2_?ʁͪVpޘsN d. 
23}&ҘH?BAA?Oɒ.\~ku?U_ zoh?L<4)};m’m/>svWoϐ9>w` zϿv4~& |7ЫG{[0dР9t>>|>m~{ /yvye[]-\>5W׿BI1k?Lt4g5C`H f^tL4I3[ SK/0fx|{măg?DO_;{_`X9L[Lk؅$j>~2g=>>xg;.|9ƈUyj9~k #w+k扒Wr:88J݇y/^{Mg7tQrA1pzq(w`|O@2:I(.qr)tsJf_3Bc{t[/lwR j1 o GToYaCw:=P.,0n +v sflr oowCGϠ|~K{g,txym{>tx@YҝNeΈk ]/vXF.g۟ =cA}k aɎ{y&O;R?ҧKMFX*$ej0و]F_Nzr|_>qnUֿrEq3[Z:ʤP&UeX4@Ey%m-iY@|- JY9!H1qԠo!,DIE{  06b\ZSP%Mj4 &7+T1gDbdpO6>;oc-!NѻzάS|Wϕwƒg]'PzvKսkskRURUV**V+HPu)T\U@1%8RCXi#qJ* rcf絪 U⵪W0vhA:8Κ:y aX:`,EY?ƪ@1uxg"T2*8 ǔ&*lr*:ׂ#&2S!1[;È%ـ!Ā 7Bhfb]F7T~́N21wlOccؘ;6掍c%h(YR泤gIϊ<摶Xu "25 GR+!mL}Hz vY PtpeT҈ɬAA!` ~JX;/gګˤɾٿ86gɘ%c~Yј@X,y_D@/2A\SWX^x$]*wV4ZNU/]rV%Y-˴FBADX*v&I `cMiNh$a8H,(J*%Cz-]<\L8G2qd)G2qdLe[5wExɴz=0NC|oSQ9;6I23]|Iټ.:8MrL2ޘQ a덲@r̕*eE3śvy*&$$F VZW] v^^a R>ς9{{'gM,Yxf♒:ɬh|Phބ{K̜"2si wdNP혗=^TUcpP^P)P &ʤ`gj ZBPNUuzIIxD" L%BELrM81׬|^&pJ״ϥb8W-. ^GW% p4- mx3#--N[B<04KMMwHVj_] u7r]@7"d2ː \.TJT`YEl  :RsAPoMg=KI,\R,\Ί%KhX[Oelϳ50Kзy "#E|">)]hגr`X3R*GV FZߧ `$`5Uո{sz@2f2TXJŏKAOH2+?m?,-ʝѴ @~: ĕ0O2Jzd@U6="&hܳ@;Ôʳl@?z Ld L,V;)Y"ǂ׹L{wxo4/41Jz].唸>.qks^US_Y'D>pDלebaLOGDnt*Mkl ʴ5 iIaR]Ȣa{>aNoԹչEW L7T"1eP̅G4В,Mh# *ϥ0B3/htO%K &5oS */`Mh# *?!6s&k w}z<] tCdN,0!SЊ6MhR PC9UvEAFւ@+bMl°C1" ϒ_L0a-/O;iwCnlȼ )+bxjܝSgSm '!`;r]zóv%P]d8^.('>{Wo|tjCwVz n8 % T֋*DV`⚒# .O4^ѯtv}B"ɾ75Ku)E3KILfC`Y 1կf2v7;{3S4ǜ_5v3JwGXU zgRDb DZѱr ΀.'\Q65c/xT ($FRNa%L+ȳ,iwc߾:5WYLΌjԾ;j?Js&qΦMPR~ksoMCJ%%w+A1(@hn.Xɹ`[+Uxs[mhJrtazjb闠Zz: j$`eZ~-C`w3KP%B.44% OR9L{=",(O z|Ж 6Lȼ+ϙ\kcLi^YbIآ k оD=/##1r"ĸ4H[0FBZe^haQ9*K*4q KNgBkXK?9>OA ͦ-S6Iɗ =U+RfvgŝݱlQt=S_W}՗|~ӁZ8K<. j3rݲ/}NU66ZI9вhh=odw>>\D$<7n~}ٻ[)w㟶fsÖ!I(!vL>Dxw D.LEXfrՃ:s(xYEݛ ~[=XO8EQ@86ֈ%705KeEbJGȽJ[~eC'sQ)k[=GnY;=iBY8\!SB#XS"PpaU8>>wc&as>$E57&rAJ5/v[ٗq`i<ĚAI@Ã0Sjqcr+*&>԰ReGbf WZwq^`_zāݪ1:P3зZ.oao\! C.‡z ;\4LCvA$a<9I)4ఆQq s}EQ13 L΂}׫O4Gm]YFpvF^vx`F+tr). YLMLMFn"DsIsEޝE6?jTM=l%JMicnǨ,ԩu C" 5>QNuye5AUǑ(J8W)fzIIybXvY{usR#;TM2u5)IAќZ)Q6(gOB kAʏJt]A&!AooUtW/xD8|]kj ՅjR_@x]$JS.:_׻mhCqHiiz45U9H(_ay?z-bIK&;IƗ櫓Fi 8LmlS68{QU'ETsyۥMeυ6Ex$*3*|ϰ1&Q穳k}'X;1 )L6_= $A+i9HH[>) IK$AlܨkyM`xYn"֏΃n/Qƚq;5'Iyką] K뤪,7H.A!ƱrmZ]!i]cQM`DE\8 B͑>.t Lu}BUGh1D4([B1Af21y;Z6}Tt6+1r$-$ii%5vfƾX}S$F> lO1W0K,\k-i7r{+4HM}kZ7zКSt`Nc)j頙DZ?"NVur`%(r~㍲͖?xFሯW"ov-Ήxq-/Y)zo_>ģYf]>v>dk{ }>W7w[ƝO$f.]]pZozs{.bTZʂO.^ֶ̰sI)N{p=%F1}ݩ6j4QAq3SuFeXF!΀e: PǴC1]$fR#oH )TRgm48돒-":1wޝlmv8,2> f'/ML 3pJ̈Sx DZ椀 މFSt p<2#0[GLoMr3 hifH g;eqBI 1jYR#QNhL&>lC;N5Q@!:wQ@G킨u`y>sP`_vdgQ%8]UVNWD}HZ N(D<;hd2Ǵ*[ƥ-2-'Zfʞ=ԺGfRLFK+\hν"_K-q$~=2*VOfHc2{'25o :8w(=7ƒ䐇H6ec4.PcPF5Đ"Њ ͹\*5cyWcC$YVGޯt8Mo%!¾lFaי@AQ#(RlcB'4!m*w<|"7 8tG%=0s8FvҋKSrfezĚ$ 7Y g CnckzO)؞W Fh ct-$E#L=yn]Q9V<[pqBrX(`:vd4(ÌhƗQ#5LlߢKd ENע=Ǘ5$iZHީFu9ce:X٩4|K[p6cJFpgJ-ҙcXp')N!:4Z<ᖱeg-Ge 94M+8; ˢqX ~e?Zp\|3 Jf'd9f+vy%)dfdR%]8p NR:Kۇp{-)i -X2Ŧ6Č~e lw"jDL50r!qݨZiԫ@(N@jGɄ-з.fGx{UvU9P],$'=7?mبg]u<20aԠpX8RQ[kGSKʘS\ ,u}5nl+8wo8gf$oJN?o9t:ܻ v,kDLҰC$崀A!^8u[Lb2*_y"Dm8;V4$f0knA⛏5o.ĆѦB‘ V~P2љOAti^K&0=UgeN(1=Eӳ*K/}TFvv-K`|sK[{PnBfqd|w<.GH1$v5>`ivpȲv&\T<`i\g2z]173DdOXIav݃Hjw. h:'C3yB' $$}J8 HP<D;+ n6$;(9S(aXIB1z_d/NߏJٕkG('E8v.*Mbfp]Df ͔NƽӍ& ;p-$mSl[r"d v17U9Z MnYSp3Lnm'Uay5-Y2Mʉ/^|ݮ>:kGn}]Jk{=F.o뻫#No.n_0wQ뿼vݫCbn?]h@L=@p8ֲB1RWo񇫍AWd̸¦LS^3JokOgj5.o_^C~_ ߇2 ,/,xY(7W -^c';+h}0hy'/͚lzS=uEyXXO/aE;ZGbhi3/i7WuÏ]p yesSwN$֚IWpx(Fq_ٛ]b"s #%~e1s!%꨺J|?. l5T*W݃K=%QtRYv=x a:7 ?V^Z [{?ebr~v]y1:# &]nYˡApS+Hcq +oDbdJSL> ja<([h1Vm5[oER/8vS rٻs(gӖrj"%8/u R3̑6|j qd$\DXm0r~ג|"#SF-{;j%1dd)(ȏ}FsFѓ5eyw\9 v|?qlCBp͐)}! 
v3@$_zg/.SCKc}[m>7,-޼ٱ/cg*L&1eBͮ]^ĻXV.ݻZyp &83 qY $k)j0aOz4}#Y v:D2",XV }td%L$RK1lg?~8|5ı9nL3+Ké(c3HTmK>m5k1l)ӭ{EQIx2RU0O÷+q6v9ZNY"*,!ǟyKeik@J@w?lukq c_#Qp.ܔD`+7pB6>!/k]_z=]~XxU]S$s(HJa|8nG!^-h$vaQRs b 2:% ]rBAFө( ;mt&BMX$ztt$$$@"灖2KVRi.7dUBb!1k3Soksxp-|c@Mjez'[%DwpwY-㟟Z!D;iףZ)1X$usl3Sz\" N* & <Lr^)~?e?Fe2cCغVx1fMr~K9N3J@v,TWݷO>JCcOOwN7f=eq'+He֣_KQ=Q4Db sbUSDx嚢ͶY,"H%HP)D3eKex߆ HC 㪔BFf:} #w&L~˩hNS烿ing#=~krl8̆MQsY8ZRFjgeN tixɵ2#Lޡ|`[|)"x"PNk==,[򰰱X:Bl@bY0eRYZ4f5"/EԲ)*j2rl(Nǖ0DHpK;5?吜{A urnX|HHIIoKX̻+Y$1BtO`ӄr*زj+{W5k BN4^͢fiT͈CY~ NWwhjtEr6m1،Jtc]r>$x JBe71KZL'']X\b!$[Ȯh螛W@+Da}T7wL?½XCfGNA@at4,DmOx_q8rR =W݀C?a݀M-Rd(_ lYEz 1>ys9|iaV}A'H /hE U< Ĭ9SfnZS)%/M?1I2bQ,mٵTJqOIQ5 ?ꈟ툁'[1z̉eݲ,n5uR=0z[nқNJ|TKƞ'mGȱ!cj=>j^Nuw6 DQ{s4d 8qP/ s7 A^ ǻ(p.t4)Z(q:gKJ9!jGi+ y[y養vKAD烷Uٸ n2j 1Usf6r0rٓKǷ+9dyH$A4HAd9| E#9,V6:`&*Gt CaL{x ˉG(ٮr\NJb¥&WdH”Hn!Z\<<S\T?Ϋw6^0qW!TXNMr"|ix!PX n\fĬ-C,+wٸX$rM1fn-L:1Sπ;oYéySf z7Um2 n 5^t'F>KpPGOJ(m >cs1+ض^ᢙ6ћPĽ 4Z^c ]p9xR9ʦ,[gpT8.( _,zoμ6 3(8Sk[,}\#Vs2lsٹ,|Li8Ѐ3_(lB{Kfwer}Mgrr;FmO/}:c^2Zbo6YMOJʶI+Xjnd347?-k++;^lx^|HNbʖ9){(GdD:seQG`~0se1z;`6N1M>]SQaE8fۮtU5sd HGѶ'ʫNΔ6D)O'.B&9O :kur줻+K HɵɏwNt=y(6Q5&<;).i;tjNNʷ9tAoIh>yv[QfoGe 񮹁TbCRH (Ti0Wa,Vl{[iK]2 3R979\Tk}ۜ|Rs(4j!y}rKcY.5 "e6mz0ؑ_j=m]% %-DK7]CT Ⱥ r\L2…71pjP1ͬ+{1{uk'Oi[`xNxۜu3(NmwsF0R|p1k.2+ 6:Wʡ gaŤ96T5)scG]Ͽ-懂DPpv0a(_?ފ7֖1 :#S'Py/j0iMb6Հ]J {宥#KO~5$jJr@cCDVoIGHqکY"Y $GDRթub@4]M^(wMYR,L܍zF^CcP"Ya BP'W9ţ!IOhw$89"BG.^)5 ysW^׼2,BC-3*gQ#HErhN"K <ZKʰԿiWzY6]I!^$}uy-A'&AM1` DL k^m 7TH}?ܭJ|?Z7>O:D;= ge)b@2)c9i#'$raka.%1@sKT`&6-)þ\HMM*A7#)ɴ!E( dU`gWykk/#̇BFc8bK W CA_*fj* h+50LbC2K$ڨ@RL̈́ViQ(PzE_/+KSj563*^ Y+'WwxL{cs.U(^" p+# d޺8tMsgm:z|wt%d7E O1|jFf 4;Z2fTo}pPnҀ`Fȉy3X˅7'RVW:6q @-)fY8M4۞p1ar?5l|Wi0a:󆓭Dc0{`^cjD Ikg84 "ı z$p,ג8CP7 iDCTwG:=s̚M>`N&khK~lr2'LI ;K-u( Bn $]83d<o^DC'7]Y_$vA!% ą_+xj! yj4^$To% !mP\2eF=_3Te b֩S h& $'wgDJp8ߋ*{-r6`UI5sgL-г_[=]<` f!"9e!?!yÁD( &RElwt , ޚYV$X{>Y%KJyY@/K<,c1=v^GGcdH&s p;Գ#ޥV?k7{^W'#y')>bBc%nDJƜA)!'IQ`8f\5b@9KB~DS3RR D{b 9 >t2D cs42j!mo3ij+^LHؒuA N.𛐈@3 mD+G(\SG>'@bŀb P(@ |ݰ֋Ƅ) Z ke [G1WRreX"TbTZ?"%6(pl~Յn)u9וBtC;jD^>DGz(pVϪ̓í>w4ӶFڜM>ҕXiuU,6Dvszd6ڄʹ w?6SĖٖ1 6٢5 k[ vL@bONM=PJ"40$kÈ6U=9 QM&Dy4$`ͦ3$DI@p%PJ,'B9(HH=>I7jI|E_՞Yl(`Lp3{!Vnx9dug(%WW%H(*h&:^ B6pQz\0+ (4R*!hbM @@I͙ 8NI BìT)̪/^ʌ`P|I|"r.Nz榷0jvjR 36WFJY|RrZ[yO;fRp%Lœ}0 ǭ!h0)/P)f֩<0gZ }+2`b8`Oxc,%R2kT& 9*IKi5dzg;]θ\H&2izh3ԿxX@P$i}&lsARBv%1`$niu ba"A4CfYxΉښlWhn^b؈[9] &A)thD*QlAQacu3,uJiÆﱄПWWQmAzgǧl R,Zv 9U3$.2Jx3%6>M^/2S~-TGOL6buT a\U) cJL1't,hDz:ODB6eҺ +t,7 F:>6|yOdM<;M5 gL7o`%-N1Nn({ P6}%땧`ME+&E~~Ik5jUT@Bv+G (O yNi+*y_xVsrWO Go2~){-9u7KNp ͯ[Rsq#H]YNF7iP%2S%9ьk9U-]x~. g={s_*Q!߿fuwtI~9:_`S7m3b_Xq ŷ7KoS-8[/~gf_D[ ۿ@ 07MNK|7A(bC"@$\w [ZUQvJ[~pyE^? /*Ibҩs{sE=/È+"IG'(dIVKxskcCYIw+h⏓^guSC) hby W.;I=gܙPgh6.͌mƓKC.^-l+<\wx>/Ό0:)s"3^gt1Ti=_}<37ixVR;8jzs=sP4K::;ku /{=0ysfZҞKS/{4/e'ajg.>4@~ӉNÏO./~-ANmWȹI_mz1;7?y7v<7Tx?ޜ_w&_߹7éepٻ6rdUvVxIflvd.,6#Ė<:2 _[[vwKG`"Kl6bX~ޗ/2~t{-,^|}t/Fϱɬ`=tݸЌ?džq@-ߞE-_OdqRTR\,90r^ ?s>ffn~,/~.ͻB<_%;|w&Kc-xʽahkɰ6ɑxm=b6 8)T` LDw;(&CZ1WLt<[6y@j$\Im`K2Y|J˼ xٞ ڋ7 >_׿etj@Ը^ O|@`Zd/+_|_}y\FO_a.ǃDpeͤltx (*{4|3MO)hrSxūO9/QdǣeP~ IuL0i;v1&|1l>a tsw,?'.8Dnyzk)_^MU6 慎]ܴvD!  
b1GSl1IQǏ3t?(<3eɘT( [u]0i x'׺r*7P!jbNQxR:'`cAG\I wۓb =)}ߒne޶`ų?x$Ʒp*{~?zB?~F!.KH}m;d~Q*qvMH3&Y`knT\ap}q.9FF@2`(':g8"@h2J$ `nRs*_KKa8>dk($JvN100֊.&VĎ`QkLJl38lEZ\YJ?M( Z 7ATptm~wc`,qv'ƀcߨ$=;[=ɹc`YhU06PZВ9PdϠzeINasH ~8U^--Vicf0L  UN`81̅5(i"2+%>[D (/XYw9_aU >؁oG?AM /W̳A~=TQ"Ki#-n5#G rw@RF{ 2u?%)ݛN Ms0[y."#IyXEZj]W U#Ej"zà xa * 1bRYOan#Akq:1!0JW(9 Q:A{K')r/%:mCN@=zP`%I\%Os]eMtd1[K:ZIaLAPs)XI,ÃTP04԰cn8QxO'gEBWlE yq;`2Yd~v߷'{h5 ۓb Ϭ GE eX2BV$eYmoWokAҲۺTG*;fVJ2Y+q}o,3R {dHo?x(ǩW۝y9EI9t۟pLJӤQ=8k5mK)B9(۔^(<@@%wAY A;Ū#6B֤ч%*g)+=29S+AN3йb!@/;"  JenY LݛLhaȾv;x|aX-a̓km,+]sXqWk@}9S!J\f<0.ϊ=HJMb9H` ]?~q1ΥQ8'og2?@ yZ֝v}psAw~^ -2d,8EY WővY[3@<)w&M@]:jֻ/$Kt=GWl&VIVNv\\#JWR oߑ.b!̎`%[ %UOpģAɫ;0>ϓ)+š]g*}n?!U%r+*x9*c3Mʌ#P_J6*< ;y'$ҕp{7%s "iЁñؓrdz\9cO+Dr<|hKvC5ۢ2]q I5&[_e͎D$zzqA*Šae(u8c913`~ػBWӬ;A t8;710N_qI?9Tk T/wiٯUcJt]ߎqTQ m)R ѵ|Ҍѐ~XTkyw W^X_-."2u2|Sy'{^@_}+k,Zsgњ;Y՚&x^<0BZ8u^XA<\qE1y82)zZ)w/Ǡ7}i"H![tÕ #guZ^)ggYU[`w\ &=Nh8Oý-zVW-z)‘ J5r6}r򽟂{?^NN?ƨO>S1q񻘙`j/{m(!H6j6|F!G&@bZI%yլ_zr"a.<cy' ʔ_ƛ&S8&[R=Ra2lEY*,F(rVrYy.\nR&0ǂc1QAA$'&i ̃<]7|Q3yfk\맮1~J(]P7?.j0UFmRɛnfmBl+3*,& SD;ydfPˢy;6c]uxeR]\k M/`v5-\NQgN3е`Xe7u8U7'WnA[q! +D-!ћ~/@j6?\z[RkF0:Q"oދpu.j qWvlgв!]#䵀JJL#$#nn-N!RnPqJ•f{A1&C A8)W` )<%ZㄶG XT (Ok1n;\QJȂ2o  ͹c8EnSxCGT uAqWEvJph FPryP.~#4 Z'%pv&&^{XY(&"^&G9 aaJ)R#X )fcBQD(DІq<i0 NB֔{|+; 1a\óXzJ8#XL-n*(HF:u#Mkc'Z9k5w۵41#;GHF($J^ F#ƱZ z`ssa@JlT30 Ž'̯{lJ¶ ]W:->1?gҒG qk-_}֠"j%qCS' .ڕEu:?v0|'_F4g>6ۓFP&ò8Na1 (Er؏.ٝk78{B[c߁Buk;ĵ3\$B\KrqKx-?{~_T1K<׫[ib:D*",&,"}{Mm*h|yÞ^_ WGIGBwzq!q'9V6umwԂ@vU;oNJm;.1U#:BW\Hʹu&;A{X>@j35\uw5`7 Ќ=6 7iG 蛤[WGVc $_Nrcz<)1Gm9@Kp4tx`h"Ђ]}{Uj*U7B"gt+(Zoh+(SPnA%+(es0,S;S[uRbpp[%Ȼ'T)p3j _p4- C\'+>j091GՃPuMgdrZJp'R3/3}$z& OK]?t5_b+snbeV[fis,qeV-ˬEǍI^T> C6;6㞗-0IcT-SJ]L1ZᣔJ2/A;,8r.%)7I0ԌkA"pIN0L8H U!CAX Q2e8m#\9pFoodJ]M)/~ۈޢNqAO(5Z*S:;0@rsÜҕ;yAƓO9 E H Z`퉈"j\:P'Jr9jVo]jQ(T4$2sU8ͶnwI4N"b(\&QNJSN2&y"f:02K2O`5O+:;S,NQSl!g( GQn (' ̥=G,O6! 9zp1FXx_㗀CcDHp^*f&ew{qݵ\turR:Z6_ 2Զ .i#J0kE=g 8=Su_*5c2)7 PS)њaxfF59 B| Q0"2R>(ii(K! Z|¹.wW 9YzVI!sIJ[7է-3ɩ0uh&I! c%kLrIN +FVŐĮBJԘB`/71Q>RՓRì#ĊV0%'zwKy|! bͯ=xa<W 9?y7'\~1ҧ\uWCy2$j{W LA*4Ai FO A ]\O]6 YEY􎨚(#c.3 aQ59#+;ՈB 6M<}pm,wVќbO[Q%)QWJHҶ)yrpX?&.VýWgI ̓%$A*VxN LDkLz+!^~I#UgGRDW?R =m2іmv˭l{fڷ % += an5th5Ei:M @hcĥcY^TtOl Ϡ'~_ w'q0x<2c [@^}^ZySHh>[Ӂ(%~9DY8rcᴌ/OwzkG[ڰU|G4 9ΥG:7*6S0ʵOIOwNnhyټ%Fx=ܧX_ ,L)OĵǝSlkz88qGzbh9 ,aB(~LqK{7l9˛;Vgd>Wi{y-=9;63o(Z`hv< /x{ZLh̺E& 5_V!|C?%[YeϺZe6@"MV=5B? 
'JP`KgQx{o߽Xso|0a 郗Vi,l0Ǘo5dXh?sE2:37RPfdDJsʹwajT !gI/t"$J+bD[Zy|z^ D gq D5DF"='(.~z̘D8͈>k%Ş3sd ߼䷛0V>[ `?0mNO֎Wa47.]: %߭, )|p6?`;T5IWl-nӏ㦻o!K1d( h QYw#]Ov"lRRe &LX/!ISJ!g6"#oSP׾ v-'8^ڦ77@W?I˜R(!$'Q)y&Ɇ#I{C˕A &2?`.v𳢧cQ@?4H ƢoZs9&%[-I&<՜l,`Fdsb^8ZS`@ӭvX 헒].Q~+W3C:XxXdЫ$ k_]_Y?ov$쫍@hEGt\ 1]Lr}duk&>%z%dD!Qrh}*r&ڮnGvsTY"bTr0(`:̱ͨiFV8LEPFYRbz4{Ty7ߟRtuh4\9moYb@eνf?i=0+/O:!l()OLHJM[/rQ~!(=fv%vU;rx,]a0;]/1yO,qwV3+Jv,Dsg:xA&7g\^Ji(ٱY9Xd)Vp@zyQMkҥ/Ow5I0vͺ4rN.9:%"'fYq3t}tSAZ>K'E-AIλ6&A DKL\2'I%/iS+lMd3%q!4vRKJ Hk]򏍑D1ik]-sǐ@^2qJ: /iĞ /9ZLH֧oy E ƞ(Uk?.x&.\$oT*U *`UdYSw"(K;_*jף<^R/ ϮwQ(e-),H ֩`69I\"i J)RxO?&媏Wwg.X$o00o8$֪i/W-djC *S&P+ \v]aE5EVeQktSw˯W9Wqy&ϋT5=]Ls\ATRuzoXj42*z2U}HbF VF~ mfpTs*πaݼգb\_X%q>z9bxU&!\I m*dOO(UZ+/fmx3WJC՘[w+Ѷ?mg:?>U7ںT'FY˜RH(2f5Ɠ=m<1y=+QYTQ5Qվpt$GG)) M {\rيF]P4= !Z:h讧Љ5Hl8 EƬe.~q _cSBp F]^<%lt2]Qt&B:/pCaL 54(Fpa!1r7Il3`t4n 6 [NI$ͬ^k:Q3Es3:K3S2T)0ZdgPFt Pp}3x;eح]T=mf } ba/Z˼hs0K?r~ )FI.JBpB`ґAP'_6 m(8,:0CDyCqjNG)Z2ɱK-#iFhcq+xlCQ**"b},| N4_ly*.CmܼX&qߖYda\[4<^s~ĸ&N!h"˹_[* M^ c\[bGm öc8 "tHh;`jOq'W2B-cwV?N0^-P?R5N `|O vMyf ߽>ޝk?>t{oD[%=n8x懃Eg{-?g3z'YRrL8ѣ]cCS -0_T% mq`:~sݍ*ͪ7'=zKSӷs-l3k[Q@чci nXy!kw Nʐn0lv0'fg<UiS147_'ⵛ_szCfO#WGS?<毮~s TgQRތ܌s{?XȬov?Bp]I*d]Ƕ/+WOC vdQ:RR4I2 KIRY U8J àدvyٟaL,ꂚHviSc%%o:tOnB8-3KTo.N8Ux|6Q :ǥ/Gԁ^ u#sQE1T fe!*\9N6Sl JJ[kͼf L6:H H $gd bn9 (_x KNMߊ#)ڴ%bS%0Yntք  K#c꭭VO5]?ܖ0&ܟ_AG9*𓵵XsMy"nNQk ewn2 \|d,4pB2Vz?>yٻVn$W [I~8$Hf'/;0dv}lǖ$A-n,R}tHV-UѣҎ^rxڽ6DҗS8{7q u}?fkz͕_N6hdndšOOkJ*U]]pn:v/sUdRJ gD`u>n v k1٫)cڧtQ'{]!S{q3F!:zdikcOsz,+ <2%%[i7P_)=DlHkDz"…rjФ;XfcQ6b :?UVXQX$N:Z0Fp0I\sG 1 @2B(@rb(/8UmA4:tRRh9h(7*֑ą匪rD"(9 )ade^(N3 ?1aY DZ6PEsE5">?܄uʻc[%aۢ [/Tx{W!7߾B.|{}]偋cM_=w}޺/W0aW7|u1qǧoogw?]]Ҭdm4zyT!/? \/W }{w{ƿm/>j@#/Q:Վl{vi.G|8[r魐XK{YsmBrzOhCa SLM&JLK(PC$,+PT8@8>5%aAs,~2Ԅ8Ca &^˩ժ,+.DB[Jr"*VPG @Žr pQdHk':qvm (H#!P *yI7R2 GlG> S .X) E>c@o: @Ѥh[_Li֧Jr -׺;ZXZaPIAxzyo.nV^ޛz8Œ=0)ű^;K1#a,!eP@.4n]Ɓ#N"|\wH*UGoJ,ryt?/ pb+_9߿gGHKgOJLvy1 *h\btʔuT cVe:2+r!2Z 1oج] 6Y>wrV/&v7`+%#TyXCA_}Fߋw\TLMf~0}!Dxw"ІQ qJE=bv3BO*vXi [( k4)8 ?nIĄEb¦8Smj*h5iť.0/l{^oتHzK u _?:{FmB 'mxLhѠ4pِFҦ k܀;3*Ň>h~$̨V]|Tmd)|l$i EIW'N)ZAf.DcQVٙ 2-B!w+y x7d6N!~U$d(h]gm5Iڛ_!ءǎo~ V8fR*QwEV!b).nK; *!i Vvqlmy}@أŭdhB2MHOX:N&ƝD*xN1. @WX szrlj':)@M헇o=A_{Q]$ c1܉$aw>p\PF;# 8.RN{  ~Cr[hD~GQ"X#^y|vIp;z5WFsbT9p"|l'r{AaKQdaUisWGX a=>+-dۧSa.%A7?jՄ Dh`<'#SB$.QٜrAon=wLqD-P_sjUl.|}QhuWPT$gU?f}[j7܄Nr:M$7N|a3;L+58n)Z9r@%B+]11 K7+KP"|;~ _Wߜ?G?OfCD)QXoǭG  k];ɝ.Br,l*rTu%ؔK@E4BHE !-$'s:Rr 71ck#())]EJŕtNĴ\>9& z<z (t+kw+^(G3Tp͗0W8Au?D3ʘ"i5)5^6`vI@t.T*lY5)%3h|r :⌟ *aDda W@(P .Ss&!>+\w,No$Vg\tgqoej >o[ ݕ1f&WͣCZSCtw erewZ3R@)܋.^~;S/!FͰ;Y?;SU'</rD"{4YqUQ9=6u,NXzlf Lן_$rƤ{QӸPx+"ȣE0X4Kȁ)5{>gcJ)ڞa xU{赝b('s ਊj)v  /YC8TnjoQ;|u@Oo{y*gSs۩W3x|AP2Ő悱r-+6Ri9@׎I 8$%E &punGL$2g2PɘyG *fMa`~խ_unvչ`Z.չU8\Er&Q!"~&:[^ʨB*\"%t9"6lQYp0) @=OYr*#۟(!/'t=}y"|36HVTgP\>}~urO)K%{q'v2/&OTuw>}a)hU$woW @ wU\|mv.^X˽GTkxVp?މ}BG̡"Tyn8KcO~X2s5M`N= S5?}ecwV AMG!~ěVs̽[̫qꏗ|e~vXrSRR@eCK*ܕJ;!C8Qty/l_]vWh0苓ە?'noCFEЀ3v^]\m Ϫj]:w;񟙽FE#%Vn\UǃαhwUۯIzHU.|[Q5MDR5m.UDt㴦QH9m<5A<|iۅe(YFԠDrVY]DʌM(E41 1b}XHaRy!M!7GK _k i!>i+y]P8jлMCU/%iw@㧯6@lQHPRTZA]Z:99ћWa#sc\*Y[_mHq GQr 7.7}$%o ʤo4A+2"wLjmaCrrDrﺏ[G}Y1bLLabB-en\D#z$4\\ONv3ZRsMcTSkT](0:&uS w\,V(8.ZиN4(h $i(hݳijeQiFu#iF7X=!Uѽs&##S3^jXpV ƭk8W^^|C`*,@~+qCF c9ɭ 웸 ?  
AA.mtmb U ZY6hQgP r\dPbBSZb'E?Il8ɻ_\|DEҎo|%&X/qHlHH>\l~qުSmx 6Vu(bvk)HƬ78rH͎~D$=jHɒ=;ft2l{GacבLM*{9{?R6dSPIv"~䵳5sTR莵p+C v= ϓ!;?FЁzz짗)fIP)3)] Jt@JރcN9ͦ@=8,qky"'@!ж49HQZҾærjs$ LQNhJY+U8'5ۛ75VuGN\j 0 92`oGM׹Bwwvc\u(^=48~G#lwWR8[":N"\WӚqV^a%5/IZS8҉0 @Yk⿘7 @XCow / '- p.ۣÂ; OײN 71ie} ʧrmE3!DF5t߫8~qrzdR7E1 sxW8{' -]~ C}5*{Wؖawیo˰;=T}/JZ81i,HO .}ӣHfxfʽ4^j#!SFJ ǽ}̽T>=R2s19W-4Ͼ'G]zFJ!9zaNXbZPb;2 )~iHP{`9E;D0qxc2!FP: AA d`w8AGOQ>޶܋D1%fYG5F@y{ذum>q"qMLa$$qYF31i hNuJQ Uxf݌?+(3=7Qay>N*E%CZAԹFWľ_nv._wJ"_?n~Qn6Dl]Q9]m>k{Ұ:cqIXⲓȼcP8ݺ_# XxI{ez d?/XqVoc*&M"9,:~ga'瘫MG@1c`Kƶo'd?)IZ`)u=j Sת͛V!:~HA(8(N0@%’06`VD{uiZRSd8YtwrBM,GǺOG'A41/`>UM'%u1T}M&,ë|87oN$2OT7})tUNkU/mW7^c ZbOeULb0u\VJohzW.۫* H4[]q.Yte}{o _]WK2}n(f@s/dLŦҮgه;"<ɺRipL\)cpړJ'^e 4䕫NeG֍q6 拁ƺad*`Ϳ\uCC^蔀pq,GM@+mD拁ƺDMI:0upun}h+WQ/z&>n@b muD >uhАWu2Dc| C!}8d`otTno>zx~6S-7۶: |F<<ϵ.H`Z]pBpJcX Y*_Z8( vD/ "E,Rr A&TH!rE2Asr 5ri ʓXR凜(f89$M$4NT6T4Oy7ںg!!X##rlIRmR1Eq (cI!@6\$A 9M9N]PA%vg՞ckVB\jFPRDRMY2!W;FxLh#^ABB7f V# xk_jX|!& 7]<\;["@]v#~|^{9 0㡿ۮZ 89ÐLPfgy2:/ˑEhD,N8ck_vЧ v. QQ(-6dj<'!:sȊj^Q9I?rz̪9t$ ZH  ]r2ۊ!CM?NtAK0HpYQ@«^!t pq:H&%O`jVa}bu.PQ~{GǍa]E}~ݏQ=o\KpHFKo-t^=F9FB8Dyc" *DRJYYƅ"pȰnc? 9\\/nLJbpxP}bKv6q!&&R+U.V4(Jަ.TI$(N2SK€ [8Q3$9r)H 6-? $EȾ|{ۺ{߲"}\} ~WFſ m)h1.nUE ̑`N%5/%(-vT$_BiuX5ZCލ;q,% ڸq0t Zg 'iSp0 x8'7"|SIb+BN-5kI ď7ٺlO*bO-R \|Pmw7ufQWgšՇn]Ӄ ĹC_ & ̿cOQ5.Ț> =)\aͲ?3,-hJfҬ x0tk{?ś~pȼ1u_$ƃc kC"sk~Fj5/.R{X=/y\;:V)I%Œ4䇶+6AT*'E= :B0$Si?AL}T+À{o]vOT- J R=Ͷpa}Z 6MZЫ˹*_ w&%-+*+x_Se8v5ks^w5l:_$ $"K$FM8׭洂N~i46f"Q~jLeAmW=_l6^.w\a=~TJpYAt/ mv/St{]Re pA`<LA;q"x ڸ.+nvYqziY7}j)ٍץHK N?ř1 gqzyμrʘ @WQm"'4nOGw 6 $"wA?뽓nnqmtS ㉹f;itO 5cKZss&ܦ|zp)wcs/*@8||"{ir~m$ͱ< UͲ:zrxuahQ)z:?m#B&aC -]ʮ~ [`~0rPDc0$ a^= :sW!K#{on=< HyH\~ 9(sK4g LB@,p!GA>lUnk8&%t(ZxƠVfva:LcY}cqL]ygZ#6xH*"D6۔`SHֿ4!$ObNDO ''b8niOH\dL0D8GHcqhń)D~89b f>1aӪZ弣Z{}E{ &!$A5҂<5Yj% +T%0h=z_nᆮڇ0EPO%(ЮjU޺q=ɱwPRmr{tx} >?<*zK.y-HlE.)I@JOhGᝉ9Dv9N<vXEј%e@0! 1^jĮ3LwX)͓v/P1dPS\}Dd2(<:4!V8g 8j *ǹsp9<ΰzrCw{" M,Us=W3HBw~W&믑5TX'5qud *[o)<爆ĺ,^xZ@lso:d}~nKzK0W1dY^=>oi@(4v4d^^ }+4x#]Φsus0d$M]1Ӑ}-^̬mZ)mW8۫*4[]q]AfJ?ߧ%lyjߕ|س_/dL-F"z<?/wEmƫo0r;;m,!\Etk ŀwG b+`D拁ƺwndj̄n[UK6S[7)a<1pR6ZYq$Bv;պ!\Et @Ⱥ?b muGeGևrw)˥$ -s^_W)'}S9@D`QwSS!ENX.?r F62>l>#kIaPDh" (咘eȡ}\l蟺j:p8ԫwH_4/ژD ,H>-ؙf-1 'vj (} e2< k[qRT w-=p5W$$Bw]/'41+wZ,^1KGKZIbաĩt=r@5!>PԂC^5HK!]ks۶+y%~L>8ns:K;&A0։,6'~,S7IIQ>"Eb{KH?r[rM y!AѶݬ[J5*Ox3xר)҇Ǥ9]JN[=G?=K#C wl=Y0ox}Œg a_gaQ}h`3~\V墸^XZIQ?.B~\!VuSVPN?z 6MV~tB:&&UIIp0kXG':9vh9.K(oFp8R2^ud@7d|}|<3Ai糙t3kuBЙ]yF 0%>CPyFzy!%R)-V(dpL0(M2>[_/&N2]huVҩK`N7ӂۉ'z~'`?jI)8H( "!Yg9KT8TH8aio7]{r'gGTm{2;[:U?fǝWnr?]cʏ1!٪R-A($]Np%PQSN=o;SQ Sb.9gӘJD;S'Qy q:!JjC@Ɨ .;5b4=xvZ.C7d5`CN!Ox,3H҄F xd`,PA,)3˹2V.P˒ؘ ǀ\U]S9E(* ^ J`QED%龎yxIP"ʗv_TS(RYA\T ҟ!k'fsNK :[yjcƈr%A5XeU>65`arpGsw1ULsV )_GR+:FleRE 鋿>V䰥D Gql ZP#2$NR!C &FmUp r m\Teq{7Zk!OdyPTM9ۡn s@標Xx=f7SQ‘.\#܊1Zŵ1%LkC |!\qV_Zլ-tjkuK8OUwKJStB/+Lײٙ.M9*ҐZobO=v.+8ZL*Rq +/n m,>^]HkN0#aãIP]f?Z%" ;8[#󆃶"XIwuh*8x)vpV.(/E%=(Mr"ff&Sb cVdpISDX ~wGHXĚVa^0vk-b[XXS '*ie-1%6c#pUR[LU$b*M,YlpZՆ*EUm|A1+VU={^5jUsd"sژ(&Tb1G$b2Dqy)q8TmRW<+H!"t611C>Ee<3!6rHAlc%Zp錆*8cP܎dH!!ax1% H,ɩ5=-5$X)APQ*J?HWb|*JJR?(Xdg8eua+HBIF rR{Ygل^zA ( 2Hc@H$QR=YbCmldqDp"5&kAP,kg^SG؉V#R+U^X6BׇKP\gr)NJr&!WUokBNq[K0f.6g2@z}|p@Bûjy`؎[S_OK_*fDQiI1v`6+mV]q%^#_ hN6B#ODzhO!0nufbpF_Z+0o\V+js-G9'? $}jp?̿IfpOV!GV3ji-3g" zEHkԙ~]Һw+i4<ºgXhܷ^!llm4 4=16O\k'q?Gqυ&LDwד{2A^W)0GPV;8;GqFxz'mE7|å~s;sLhu#ۧIl*s'UBȦpײݦRBdݼ2wK̉@D[pjnJEWȨ{ȽX!Wl7B^{35fቿwtd֟^Ij%aI,mU]绘>hW8)i8v u"wFdR7c ! 
`X䠀KQJ5RRqEFquXZp:a**KH~PH,#b-?LBMaHuHW(ZS,LtB qJkZ,)[N),Pg Xؖ i'8XcJPV[ }E T#驎""p8{ Ԇ6ȴONw(m#iH>G`jH0GXlL,G[ \bQaUbGJuLȩ-r1u"kT|bCGEJ/|+E$ x1n dPI0NX'HFd$6 .$tSr6AS~{9X$r>fÓOtm>|}g3neIwN}r vl,1wNu>+OR v42$?z=w)Um6f¾k?LzmRϭn3J13 sv88VB/L`*_^Flj{\! 3āYHnxeIfܝn>f3ZqunúSǴG˞tnIg0f}i4^ꦽ\Twmxg&@/٫d o0gbٟ ]|˷o޾~, }"?8ޱ_~Ǟۋ/{w|;{y(~zOcH_w߽짞|r|`מٍ̌B@ksMa)Mr8ҝNnYKCTٛ({hd463C5c{ckgo&i7#Qnj;_vi]Hɚ%z߶a;J~K@pN]bZ)_^SW{b]w-m3ge9Xȹ_ g۴YTK}K魝W5n'~꭛\YK?c0sgz] |˫ѠotJ~.uOU햁-zn&>W_0U|8{\w_\ܼ?CߗcOOsw3|$_Fףtλw/0`0k;8݌K@fBɮ~_i `C,x|r=};{3Yt3H]L۷>M ? AɯU.ݬGfowbPcQ,n>$Gz"8("q$8#aaeƸRhLE8F/rc%(0 -̽MO|Oٛ3$4eo_Ju tt"BSfV"JKVRrPD<0eo.Ja-^>.%Pˤ u/U|tYGf"%F`\<_ymH9^prR ݿ vO\ɨ_F!uPrUmGw.54xPJ(Շ%‡U^UUXQԚ+VItepaw5 C12QT8bYnͱROVo2()&6+sW9R( ko}d^, &5-ԐOͼJnX),8k_r$gam߯&Bv8F޵q$Bd_!5M%YHʉ7?CJޤ΅Ck`&)juMUx5}Y ~(.um 8D ܩH<{7p"4U|w3.r<qN(KB0Zܗd7#XJƻhZىۦiMSڀhs*JԶ?~fwؕ󓿏-h[-3zhƘ"f`ID{.fܠbVSQ1vg'4ȋ9&ۻŗ̍gi|X^Tyk<Ϙ!Ṣ`9˟aJrLL麷|sE8p+"gÑ_ m1}hSGwn)/g7M:ghH%OgrD)$=ΞU*Z8oOAcyTR^Dmt&|O|d,Ӡ IAF5tR"%AHQ-c*Li3ET)$ڑRnVuJWLY &Y ϚG#u,] &1=F)(!_YMTᗓԏsCښ5AgJGe֒wF) ޘwtdk I y_OS& -v?^iCgp ʌ0|R_TԳR ql2DONܿ(__t*1E if.3)w /ޯIL?2vi"=n"y?hz~ ?-wW^at %Ĕ]iJȫ/FnlnF {7q1q @wu&nكRSURZHcr.Qc˃R ਖ.8+Vg$iexpHuK_H[K ׅ;B[t!1a8[ nF7ÕFػDqE91G4 \ЯqU!x0l\4@[or؄)`-D;*V&} .rB8H}.r ?یI_l&bLF%94:iLhf;Iҥ@<.Fm q6z*Yf xb sRf9W&$|_,Xk@.@/Y<cmpF[3 ֦չZ57= g@> yNHM T T>ۤ_!hٹ77&9P8|=?y:bAb{Ɍ/s9d}/GnǓV|hy'ss߬-gzsk}SpK#VAv\OS}DiRA>s=)&-,Xr!pkS-:԰nP+Q ^yGKRǞ}ߊzrn\gRYTkNze¶GovovHhedT[TˍQMoG+=0m`vf/fw6bף.QG-7LȕTϵe86gRA}LeET|PKギϋG{2쑲.,ʞk2sKFn鏇:By^|jVdv)=KQ%~Qb$䅋h%=:]vAԽGv>.uo[կlOnMH Z2DEwÒA Etޣv;] p֝i$j&$䅋h*OklWQ [] RD=hӥA#OLN,Q5!!/\Dkg7e4 | 9&vdܞHjJx~ \0~&8 5%b(B8a S:wVXq'4(.uL:8sYa3 CN, 'C0 MI MYVI5PJ:]IuscW^(=j$&_b΍v!<8OsnQN%AXn lpS!j&FL_ԮYiQ{;1rvQ;Ⴋ."EmD:&>m ɐ' ^ rvyrCɝ@?N'T !3N`H]vל`_^*:q%BB;K0w=wi蚅q[L[߼cnISi-jLsL0bF( SMhrq-ǡUB$²sǟz(IdHe߿?~: q" 7o6} xk&zy\caan? ؋ ħ*\CZaTxoP(э|>ɢswY@JT,( [0G!E-Ɛs2aC}bMbĜ:v-{v7e_J# :KS3/@t nQ, yUyG9ښg44jh i]}d4\OG|_U}|D!RB7ޮOThuߘS_~(e'iko^f9O H/Z쇑VC9-Y"1Js*R×x45|X?L*NpeȔs:cjJ޲}C˻4I!rI uH%˅ɭt2O?^0lRVĀѡgrQLAPi¿|8Qq$f }ey@tn?/p{X `I8pa f7fzٓB d30[V_&|Mbw{7W#OX52Eƽ YJ3M33X% YxOĒ #T2LT"%˙9˱F$*p6:(ru{#SO@hn$pgǂKrEQ@:Dx(GuPTL ;Ŧz{d<@MmyS_WꀠKOF)JnHQnRf\H/BGeOm/R8B2c_t rVlm/F*4~9xx~bq'mh?m N3yőj1Z rY9O s&d3r|(pB^#EFuot5Zem5jZ,>6#cWGRB]3}i٧vs Zc^9n‘uG&qIl{-3=w93^jTWQ/]̢sځX{ιv!*-w` ž4RЋt2Wxz@}fC'l~w3*fDxqR^*B/z \/+3._3 J=|^Iȕ'keV)Vd#XD:뙔I>j 孴dluj.~Ő(聁v"`@\$ ي ΨĄ {iQOGhj'<׷+4?\n+￝\|sZo$o%[ط(ӳ7glt#l|~wΣMp~A1F mxld5&T^P+DT:H"B4=!L 'Yաf-Y֫uR2#%:PBkRI-UӦcmYZQUb T㮿G3H1jF>ddOϱB)8DSch_3qrN(8ڜԀz6"B<g}@Y}o$''p~C3VMr܊R3 {V05kʔ Lws@mJMqfd40} tS |IFӇqnS S2$8owQo It9RM(E4rVqvjvYw7֬KuhWc#JD>$>wQq?e->-,bqA[;?_^Ei/V?'}X4sio>$ﯮI}̐k$.}8jdZ+Tpc~8k4z~RB{Ԇ1sP_^ HO"KѸ&9VN T^rͦ+=ͦW5v"Qܠ?Km[xZtI7;LgJqI5Ȓ5c@S  "gsFdΫZ :`hLF 2UjZqH ?R؁w|(Y ↣g8W,EDjrBk9. 
#l}*ZwZE"0 7(:i[FznOꑦ19걡ĹP'hsME PAsֽ;(#AóT~OR+%1q7q[mS(AB&TSq>  +OKVkKlEŚWǖH8`ދxU5WAek)i%, |vTs[\Rjk# pƌ$W"P ^ TܴD"NwB}piH1LͥF %`o[/_ZhTG_e-9'L9γ1ޠ+;^Nb⾎8Uij`@Ƈlء|O`zj- zVImURw+9jvāL(d&03hqf)ҋ2ݓ~"%qjwn8mHY0 `q 0TľygCQ!)!N`c*g Vdf|ږZ l`d+tA18XF9 y&ݦH-&`srSԒ @\NgЮKl{A o01B )rf7Os{) ̮J+ICpS\J `eLiIh!e+C7Prx~.lܘdjMߎ'čP8LpvnGE݄_.nnXULJbχ~GtR ̔E,Zep%xa{fmayD e{$h7-r52dqOdm4ɲ!'k22qyK]$wZ3i0!qwn{QixzVL+~1#@=8oݷۇ8,Y}`҉í'wƫ?!N]6Yʤ]W|, >#-ӴxM^^RV9PkX8v…4ӲK-9F|`9C5[L]R /" >$,!КGiQ h>Jba3t[rD PD^ƞ녿] ĭBB3p;U7o_ӶmU}OMȎUvaJRI=5.KHNQ)%U o.L+FJ 1Rʙ&f JNHѬN\H%зvQ]_~dNG?v,i&:Z]×T߬{WRm , 騨?WqT @-L|,;^C;FxSH ]Gt]7t u$lvh)S'瓿[\WѦ/W^m6??gS㪿PL\7}rVR܌L@l#_ǘ̈@X[/Wj-=DT,VI4ݐX̊+Ȍ_ -XΡUbJ_H^DlD΁ֹIQ9r)A lkHkUO 5SB5@WN*>쯡VuSBfYPB'y=`Gu%ҕ38t\{wӢVZnVNְ>%Uq%kRBڑF:zlth5VODK`Z9[[ԎDF!N#ޢvܭ#7jZtgPá z0|y*ay֑4hl7eCNG,FV0`7dЉedD]QmVSK +.//ˑDцsOn،p,5bՑJ^jVėHtKu"A۝f[r(c_# (ޒ18h@Iiz&&/ ġT rҨLF̾kԓvKwn hR ը4vVXQZVv]78fnp4Zࢥe$GmAuDn0zQt_w(zuAZ'tEP/fRdHJ)Nu?FDܺZT=Wwss|nM9]?i=*V ixNU kR:\JM)l<S sf)w:+L L:35+l,$yƶNVڧHS҉8,cC~t13HT|v:DZZUy =Nͧ,_O{ ,Ow\>K5;kjhcN>GΆq 7\u^{C nO~>rbіwvIhNmԪKf3kd;C&ZL-oLJG8j 5аӓ+OrZf7rI/uF!18(- eM^Cj:b՗fL[?(U"hkfJDM&(Qe|B@+Q5lj|=;Ui;P]_E|E:R Z5JgT܋&m VH*P#bI(AM`Э?ے@I=NzvݐhEpCBVFF$#Ni cCl1AN3 V2(?aM34:4E{Vj߮Ժ@Ka^R\T% FVc:h6FL@ V#@%$rZ88Ѷ<-@7Wv7Nu:u.lvVXj%srHT'GS TW׹#(eY@6D-:iI-NɥPMWY'#t~Zl:RDŽQҍ+U3P=]-L!ҏnD Hhlk+Z'V^XYW?e0]iڤ7?w.nc(߬Gߊ1U#I(|" c^ N>\ovGN.9cn$ x bDWPV0l>|W2_ -LCܱ_ou~29Z!{=8?qrP1m.j)JΞ璘> 8wtĸwYP[m(oOvR !!r%SG"k7l 8vK.a$޴[]m LJ*sv9uaO ΩYa; 80#4 VkX`+-$=35f-M *My];zT壷c29́ck0ړKR4(6:@78rd_9Ⱦ+m4~A'h> |w%8 h_gsSEĎRrRej]MvR}M'i#L`se)6I7+PcT&XgO۴6DdV5iMҌ5U3J$XN/`1_r̤DNz&&/ $YM5G*2K'ćpi|nVSrI}e!,pTϤH±Tݬ2/5J,Sn4r5?spaa:g|ǃ=mßθMsc<݄\2d'q{/w(f xx{~[7U=(l<}2;r9+ h;p{)g58}(jZ];(+ FPIy] Ζ[J1XK`D2b2}nw7롾X:)2&=@RkP[#ORT:)UҧXV<|җ'uW[Z>N~uy9ih/֠5U4HBwYK_oGvl7kC//9wҧyC<Κ>j:mQZ.X=g6~(_Kֻr^,Q, ,1x4,yҋ)<0e$P£_ *G>`Ou/Z@wEz ԇh cS6nbGu`-]˘}zT'`{ALC}=92e܄#4!`ek&*k&[ [j^ *{tn<"Kq1xoȊ=1! 9$[z48JokdfvJ^ |Eژ$-U:[i"SЊl$R+mlT;Jrk]E+ !Bn9d?b&1iAh R.{\vkݙֽLQZMU9J"Ҕ2vYlEv@Iۊ}[gK$gնE:ոhZpm94QP:g^7KvYc@h_sFDcFm^c65eskL~>R ~ZaݘeN^sN(B3i'*{#A>][]:%βv-4?i/֠csS FjرtHrCH5hIll,2 "iEkVJHi)!5ZS[hKߊo>*p*zR*O-BJbj#Zv$/OJ]e(fK)#RWmpҗ-I4E+ƣRit5'eK)B"LC-{Ag青n&]N"G_.c~nRs&o+7ݬo(u:`vx/[`ⴤ!fIM:~W}?1ǵlDȪ+9G['>}<eoqQlCft]0qh2oP>}QƋ[C#:M3(& ۭ`˄gO#_ȻaO VMdK(T ې GL IvaعuiQb ?ؚqte_=Z̀Ak_;mcHo~YMvU]}u GNtq`?_py΃xr퓭kjvC&]Rb y|$$YfSiy < q+Z5koLY-ReYU0KU>SQ[Wډ7]yH9YZxp2/ ^y 0?|U'[ʹ4_]\};C4e0IցIrZa4m{dܸd4GE18#3 IOy 2fF+P"TC0UHn'?6w-}R=-쎀f%S[ 05 &J'Sb3"l9Qc35Ie4P;ˬkDwU]/޾6_l/>_Ѯo~70uw09.\Jͧ}}S=|T>}~sDEg?s667w |e6/hwff}_$z=%bP.ۻ. h@'%cx~v ߾\]ZQ]OT+g]wL#]p/Ǎ#+YY/q/^p|"#f%MT?Ԓl%,EX$/'{W7]`H&xMh=zu5~= `HΤ ngegD-(+ɠ,zNio-'c;SN˝7FiI{;{JiCCej5(󔬫_Rm8VS{No[I (6vF.:Y_mXڲeI]\~]ucY`2E?ޛ[Sck~lMя]S~@pD&1w &y/Fv5]_vXv)m}uoΟ|ۮ?.yq>oH_>؆)if5@AfƏ֤NJQ_UP- Rot̥l76ݞ%LڜZnPd Ϋl@1 }K^,BmȎfJ? 
aDDLv by_$sk,:QP(O++ ]dyc}8za/('ס*BVB:q]n0c7ww7o^}zOR o 5Y77x@:9 ]xqٿ= [ Б +Pb)L ANH z[mVȨA*Cю|o i$*YƒO=T5n{Vf2 HfaUY^ P̞9ԔT -ԄЇ^9-k, M\Nwl"]ϺB]Z_|[\Lh\ :uv٭{J s'']?vlUn?ݾzj9p&SkWl9Uç ?n}2 oQmyNiHRg,?=VCu8}X Z*)~:S7R'Nf-Ts E46ِ[0Y.'k5rߠCkۺm ~[]~xSZ[+1ANn{m`5۳dLO%jl;e٥Mv =d8|G:6ծnƂ_f' [,W?]'2 *hl3)_?/SsL+!hv.?t{Bnw<@gnXU$,Yw|^ǧmG4WQ:%ctncyP:cǺ.^gZgnUZ)4WQ:ՙ;wu:;m,jTg;X"XІiznSh7uJ/pB)F }W7:mrk♻?(&Li r?n}WWJ/`w> HRrHuqO0i"0-oغ˵-+2h'Q ZI;笛g?gk&20Eyg!nCŇt?jeO>ڹ0~#naOz9>4W[y3 v!x/ :jlUÄغ+ծc8h,j&}GaE3&:'gV4>V4|*:B@8ǺmxZ8Wzt7VVz#.]7 %B|n>^6)wy+^_ M,Ov~ULEle:J6~ p8Ӓkh(-y'Ue lDgZ!TuoE5Y<6~2>)nR^'h̥Yo[>*ڞtBɉ `̙l+6JJ`Ū^iM'0O>Έ&ByshVBnr>hbTݦڢ8Wxz^KH3߭jn9$GZx M*-c`z+;YBmdar|,Ѐi6Gf綵J&+1;Ka0 l?@pX4=Lzni\٭r%Q&>f3Ts8L_bb:jXW7fqN]R%S9Vi/=> Xi"wz FMtQdɚ(A&gO^8 Ox+LJr$R c C11mNDrhGց?NڲBcv)I(I:5jr̦hm^v.I0d#RJfI"11H NPJؽbcv2Hț/'ȧ%0_":kDL{ɩs"1Pi3o#w/mӇvʞ)rQSQ"HjAM Mf>ByQ 'da"el F1 z!+6ރJ1H{Jxg5fq|+v`W]AoyZn3^r؞w@OV^d.CP-̈́/ mOiFXSBLW1^"ep8YMu$&*{/x^iā "M:jvv..!h5ztbET/{@{٣b'iN9L}@'"+1ȩr mɘP`| 'nA]S[ N":#yLRyJ $ޢQ(B)]ӽYr"`ۂvaKUl1,4{ ]hYq-ZWD'C@J//IW1h]vYW*H_ChHOV %)SaEjGb,nUMI5jH5IS@_Eh)%5x~Ķk@4~D/m5:`Ca;Zb>5ʄxԩA6c(>>/bZ:熚 m$)LYHJrY6d azq%iA~`'M')gz_͔<7৤X`;2FƷI:=,IvIlVTwt[*xC y.TO77iBJWv>-LDd&2f T?V6!"秴>?>W#J=*:rHYImch|+To燆 ' Yv#6gRUT9ooZf;О^ЩEȧ3Nܞә{:sDXUhD͵X1G,G8LLL/ Rj)Tf& IrmA.VOO|+R fL19~)oOQ0o:5~t&1 '_Wv"LmFݢ=~Rm']D='3ל>G"`wW swOt~אyAG1 Fڤ}< `\k@Zi'~R'$gČY%>9INrs8ȏEu0>+K><:S૳ȬE>{?-Dd>Q^>=:wOc.U]4ߖFߑF`G"Ag; b@^:` aG%=uEn2}vx+gG]k]XzƀY8{k,sl+5%[%[{.h-O*R7~,GxmX& Hi ^GC8h1VDfh-1.[]ྸPS޹X͕\TKVy[pbZTr䲣?Gpʭ|ʺL‡$:`%h1c0lÖ]ĔV)#0; ;X,arg.Zȅc̺_\m1Vz&I\`ƉV)gTde߄"B×kFC_~Hwm"68F_Xܙ5@d%<^Uu_ \jk4knegc97o`&mk6v:?W08QP`lB[MoBq !ץ~Bӑ)ئH0M fIb yK0$1h&tƨw*ӗm KQ-J)MRo9.unߗw5`MZl{)+`Lj魑$btQ =PSAXk/G+4[f0=Jp5<%4p3%|cASFX:2a3&(W{z0 ֹ!SWB|Jғ@I%V<:@D %-jL<87\ߤvƓkOz*uHi.Q0[-Φ(?/7߽f2ݲߦ',d )|nNS]^/g 3*)24YfKRl`GNȞbC,pYe+ne]x&?%9j߇rbřHYpJV:F]I{ Ǫ;[Q*5xYp9#̂Ypㄷ vʂԒ`d< B - %ڀ 3ޡT5#! Lbk,sFFVʖQ@fUMF +dͱ\v٤8neugc7M%-Pc̒xST?e9SǧNz!Dl{*wx=N0r&+osCoM^x{zQxU+Q1Sˊ*Tֺ ,r'![ydz=aWhk9fOOb1Tnƣ~_`(wmt:T^lіx2y/6}6f79ԝ>{՝x8e"/I6]ߢ``,z9\L7^^y/; Hbȝc[@JqyVu:E pQk}txeyj N[̈́c&xz1XbmAS 5 suan ~+Zr/N.6_&_\$`gRH=F`IH^([iWE]sI60B&"A;jeJ#EӹCAS^xČTFm^%fP1WǾ32ì}k+僬Mhw%C&B0N!KL0J"'}'>vϲes˾? 0@4r$`10A#8+CR"RCXtQ6d7Iݍ= t$ۛfp ^v 8TB B jS@@&ޢE<T#)߉8u)熨 P~[QʸZzXX-"WFJ|3jS_Y>)Li\,9:BTc^bk4!R9˧u震sK,1HNO򨛥K}Nq D 4J+!N9au*3I3HӀm}/.-|ztpaoi9\OPю"%?W<+Ss0\D'n凿~\ufBm4c7Ǧ!wרoW?_˛) WV/JcN~) bŘ]}I~{ry3F "LN'|JX?1^@{6L ][6\0 Jp_@|41p~F1UT \Tlk/̛"giz~GtiАlH'ol F09> E(9qu]H35U-;~a n`;nJt+ g锾t;fIL6 /kJR*̩]Ym]c`;|1Jxb213Pq3p0zގ}*=g*j1a)*&ꗻZuxT neXUCu+na{05dG^w_<Ѥ_CIMdIDKz]Zq~"D}/lpA#%BxzH*bdP.H^J۞p썮ۗC֜i'E( oFmO[)/5zrګOq)*d0PF7  ZJ\ zNEluZzx{R6c\C-I䔻T-7H}m<‚@~(ht,L`7 ʔ9x[ų*M;$!PA+kꊸ@u R;eïf7x-̦C7zW>?2o{ :Y1D|ϩYhY#%Sr/kiZV4LwM5p11+ݗ]gR40 F&'}3:˲ZKjPo#b8Q)gtm~^\^"xDh;8W5FֽKaċ,D-sL(/],tI#c&@gvQ-:jratyڝC[3ҜΐS&G KƬ2g6'n{bpUS7GT|1&<ݻhF"SHi @u+D紸AmJ=.KJ,{5Iѷ" Sjdcq=3`=om L%Y~^NC[:]bQ:&u]30i8|agf1Xair32T8" F4ZY jEP­ |5o P| &k+D}? ɶ4hP[,19A)1EQ"g2g;MQiBRәz^^%0MDOfDkJo.Ă7$$N\j_k(]*1aCj7X#]Ij׆&dStsFz*1:ۀ,!ֽn闅#g6,76%R\CR,ҩd}H*|Sf7~ҥH')3I8S`\ORĬiДua@atWy6w`ʩ.Xq8`LX39x1$ rQBdF#r"/~!)gVPYܙ4seN E (2YDDImBh)-<(2+;TclM?N?Vyʄs&-NPun$D#,$ !Ra48ӂq. )X\cBi6Q<']T3c,WȋK)2Jga)Rɮ/^&Wy-̸0[QW<๫;"ncƫb!$/3yweVfBF!Ƃ$_`yQ$-ܛt pi s 1FzWo Ϧl4q4>E9nSZX?fI.lH.1#w\1j7m"\q%yG%=F]&oN>S! 3lo5Ow`xǨ!Yk1!D=*c'{ܑ6~z*~zu[vsCihhU{f=ӽk\4B긢 <سOe1[Ѭ54$hT: ŪuudZ;oI*~u[ *o+<]en|i~!Y=`"AU_JN dK )l.ϼ0;BtasjjAB[,ʍB-DHÔk)RKVp/;(p+{wTV{Hu0#ADͫ@}R^ :dpzP%ąt\?{$XC&ԍR+*lZR'GYS&G5j>0g @Xg![k%)R3i|*^qHb"/5&(֗w!&QGs던(=8 M4ɦ}<Z]K *h <Ѯ%Ү`!?&ٔ@Ϗf7얊A餾67绥GnmXnI6EDg7GAA餾66$GB6[hM=}cu0'_h.{4mb K< ;nTh}._#Ar.'\F+ej _]9c! 
FPN9ct>dbyK,f3P1Ѕo~ໟ[ҬQ%ZXCV 'S( : l O|QRC}ki۶94ŝdss`_x^ϋT{}iaOoc2El9O QU:ev[j|߱vklS#Quw \Tce jKȭ0BbEjTa QrTJ}J &#XtS߫-a%XD${"M=Ӏ?|[i3FV>.U[ڠzZ`+D^s7LNr>8,OumA;rDIwkD/Q^5tBI })QRF>%TKk*C%ë+,90j&*xpݡU8,f&?/J Nie(xh3M /K_"@Pm<)c>$wj<ލ n$B 6bLq^ c~ӲϩÎY[2}LHцb_rbsIX qv{Lmڟ-AH_afS.:rI_\(!awŦ~@ґ;y}S$(qH\j!]Two~df2BNQk1^Z%bV|&w7V&8kbr*8i3U)L:gEiل'EYN14QЗ!:uЄ=.Mo-ϔ< ]S!aǓrFigX.Y(cy+`*o4 ~5aUjM~ t.޶^C+J@{ Ol۞/{Q>ǺpD-~qcJ ~v$uui$Wؤ"~XS0{e"v^Ih51M8XSڂv'g I$t;F ƝAa2>΍Bxv8?4"jiHKXXgeѮ2]q2]QWZ\.ZKRiŤۘ+Ut ,9PiWL`Ez*w.Wp,yӰ2F8xC  SiU~-<!6 ;EA0hx)8'a i׵=7!d MƵ_!t!VHՔ Uf.V,9koM7|ޯ>'a9heA[](fNYt3cSӼZ4@ft)BH0xgfאSo>=$-fg "70vkX_(]g)GLc]7_*QĮ^ěߚzƇb/1I̱'i鏐D}7aUVP,*J#"]`O8[  ™͐VeV$R5,lͥI6-7!_u7ejʌӌl PMމ;T[0; Q -ɹTdXBaM#c'[P]_s6**RuLռ\2wuT6[S u㱴ybY\qZ.KlƵnWVHxp{{Kݔ;*m;e{z٪ geGdM=9QD}Z2J *t UIY&ⰶE%)Nw"XF}.Ĭ BN )\YPP^<7dģ_ |(W c#1 b*.vA [-# ^D %!X .xv)GW(`0*/zQy,Rʏ @a+KnPȺ ,så,ͥQXSIl PPMK`E4Eِ'$P^ D*ItB.:e,9+8`D)('+ Ci_V&IB ]C:@muOeblu=OAF4.% j86 JMKDE'uj7i5Y-@3I)ΝkLm~@ XQ=0[6M N+q'c+2νq1"YHNmvxyzM0;j{;ᶛMDt;/ @odtu-?1e'GcdOG#X(`.£ڏɴPIRm&J"oǠzp&Xbil8Tx`| S *a]GPT}O]$81D^tၽm*2)Fč ^aA K8&691T0tFbwN>Z$0>ǽN!α"ɣYFPʄ VΉ m"Ɋ(:3*|5O~7(rK_!qB!8j 1fܐjQ07v;T;(S\ ӗ9fXkKXbO7~P@s0c$JCFvJ 6&k%u뵣Ŧv!xqDheO \llsoG! ohs2О}qBAhC!yn2sY3Yb BF<7/DƄAAD xAaH k$gJKj *wiu\FI GD}{޶s(?y7tı΋<_)bX l,*r@B\]##(QQ\}!g>\1qAp2xL _8QM:%c*F=X(6.']Iq6={:4z EwDs.#*GY]꯻*yt(Ơ>O, P'e^\6e5\CJ8K=_ܮֶt<[%s?G/\p}.\r6ϔs **g"sJќ?CsFVCQ_|TnaW탊ڄz C{7ꝓf2+_~JVLՊ:E$j2uN=zmH.0Iri jե B}J4s+RNsmsa93DmcTfae,7u>󵟖_˝So}rh$׷ݣD(GĔUqGXuyIv|VQ<* [Cd7EإsS̥ (|r3+\k}gf-_}DqgArݭZFѮ~΢g0 Tg>辺!zkJI3,¿l/^).uf,I ,{%͌h`S $q+:^G)\ C!3,򲄌9qew/.n&]zkCD2!'Ep\Yn 8yXWK>Ɩ0Zs%(a3Y`'a]j d ͱaA%9189|ť1Pv8O#BaD2R'Ƚ0qBwJ M0-' T mBlIoNT1?\v-M DLȬR7̣OV+_93Sy6-Nkf/dR_5p3V*IJhMIo9KSf}1Y6w}m50BL J+ 8n8%7jA}]&;)c5 $\v2oz(ZP-ԠPn@ZJ%0[u`X ͜Y´ew(=Q;kCanY@qȖ؀ƙ2&طX f;*]XtPkV.~2ҷGHߕ365{ lD-y%j9H>ͳ+g bU-;Id?}žDLJ{{\N?onl-~E) }.OE^8 c3|1XXcS^\sdd&cԔl3T1"Iv8^V k9!o -Js~ ;]Zy 0DMMlJU {dۿM Xת {*+W nlm2rkl6Un #5s4? ߜwn&mš^poG0)Xīn0DB1GG5T[VĞZmmFݎK|<,ttK6<(IbpJh]$(/@kEBp$)h3ed@ә(yT5EZY]Uc]vҋSz FGw_pE{C /6E}gk{ѡ.7šN8|z+a[Az)ۧ{'O4rhѷ9J&;gEleӦ 6;,U/~RqĘ?nf]<4Z}/&NZՒQR`4=xOA&%gd )L^{Q^^gu2O0;I4W=+hx u[9=ůpSB $ߑQ2+G}hNfwK߬tjOo$ImRӹ[sr 3ف!HI ةcͅ+#-m 0Kn |, 3uɂ{l`N[#L`Ӏdd˧3R$SF Z٣#I*'0\]s*Ux=}|E {<%S{vhkZxa [1fȹA_d/[n|8B\ַ˹M1P7?f$݁8 cyiļ(=7FL=#.#DڡPOgfOOl:"l۳Π:^;=t[NJ+[O>7zlzxsLmK^fn/C6vR;qa&E[+=-H"% Ajrbpps0ڭ7* :z2ԎH/?N<폼<S'MۀwNFN![x]XO §hМr[pT qwWFD\Ha-Z$;D>boN3xxO* 6;6]ڟՔ݅$78IzTAK7ݽD},|fHWه{I1nQ7faanm~$+D@"X,$JC;+Dx8TAfC:L. !xI܁:e$!'ʾaH<6m4&LP / @|LfLLJpF0[~ !'ٰJ SR!JFnq2AC}Xt$'HO`'j>ĭ4p:3)`T7aяwaUǿ "}Fl3Qn.Zmgf-(2,sן>LA魻n+oP,)-4{0?$ "n!TS(e`ȡ@ N>LOME s}7|&@"`UJu\踜jctCv\8q[#߼Z\qY`3R딱Bse&P͸0U)0 6-㥂{ZbQ;[ ٻs6Y||hnS8$ ̲]$)$4BH!S\IqLq`a'$"ʕz!Է2P$-azA_-"T8-O+z>"da'qPё20oYorxؿ[%/NA yĴ.VL7^2y% |(&.ɥ*)(.wM)\y(*w۩c!'Ǜ=)8W95׳iϵeo rzJbWSۯs#zX^=<%F"ݫxDcFҭ W绪7zvRI$Z{K9W 4 :zp@^PC I$ Nsu!md_X:|օzZLז[a&"]|=.ov[rnqVZ]q;{|ˋ+Z=KV޲f4cLIad 7,A45rTeɪr4/}=o\k3[-B/gkk:sb $6דzo\OS'deMmsW-+c x/g|3f4(5wAA Rc.dB0Θ z;HRiPq9N ۏ"Jr(ƞҠFP#,ʜoQQH':A7@8}Y<@\{`,C&s).}ٛAUm!heB'?v mu肹 b1֫)()w2TgҚc0I]/U/yԼg=7? 
w[jC ʀ1b+N|1"-{۬}G(k덙W[#Hp #{\}ٻy:겷D1tyb}4UwǬs,:jaaKàE{x j Db\"lU2:.F/-$p "m>B!( 1ȲC=VUɳuǣ&Ԛy͍rO 4Ʉ54L ES)J LgFY3)Q3ʼnf(7`/^b{{$9N|P/ez:ܨ-EnBF[:lx4k(7] nRܺ3l~2)"F^Uft Fn!Tc.|ht\ZC2A~Z,8SyP !C2Npj6*khiԚa#$p @žjl\>C8.Ǐ]F{bV#\`F<^{,ZZ_^׼cĐ>yw{(73x$bM9<]dtm QNiiϴpd_N"f'aWx#fq#j9 뤇&UP:dϠCt*vyH ǵw HP&hsl6@Hjr)D)K8+;X3E1͛ jٜB+Q#t¼tzvü_S" :f/>#t¼]R~Q [NqΕ%^ߗM|kcV QإlozyvAhbBD5԰P>_08D38  C+LITvB)ACƢ'f:=le5aHˏ|~9$M-3mhM\GYJPQr,rS$V[' f֤_"O9OMUbx?y̬1bz;uО`By9g!wY^5.#_7b"Be 4OH$/"u}tkc{lK#M5U0a 52I!LiV@E+1xK ˆ: +W 2,w| QS-gռם _uul#Iѓ\a@^_(8W)4 TG WU#')=7ZmJL(|~$[kS uFJh gb;ҀZbx^l3%i!l fir@HHa$!xRPB(JrG/mߥ{v B|lON>,SmY#4wL }PeΓCߋ%6H6φw%1\m J ڛfS}lZ3izڶ|z^rNr?9O:'/w3_|\y *CAKAy!QK]ppQk:@!( ` f5|nyiܠ2f0JfJNT0D39)ۻ|苂̇ T8)*Vi ?-lă|g6wv%]Oӓv_iwϟ~n |`߿Eo<~J9T!VFkbG]+ xU95rX[;SS>o؜W\Bvcv!'fmXHT71N~`ƲlUgD'02v49x’O7;A C9cNTE`O T:aN9Wg@X&('woR1w.y\VCUK3kZ[P=SHq8PVd)M ЬIL)@Xyg-)dGo6r#"aoW6Y`qKIl=-{eya%Vn$HfVW7T}jjBU-[%RCJB=.H3A橷^dx%YN>lu4bnlYoQ rL6uo;X_>VZZNojbdJD)VPX>lTf;^4Y@ 豭/K I@DRIZLf~5 X93:N]*ⱎE0郆&D 3NA3LA -Py5`Baf ,Y-d >^#1پċelKȎ:%#K-ܗ[-J{͸6ّR ;ZjM*x|ؖ:Y|bbB*֭ZCI,4sWb˳ŭ[)Y&Y%wnu|<{]8SG W?kŗtJGr0tSa6jv=Ii)qZWYdc\媌ٟҜψ'*5]3j iWZ:U솚nѻ Eu~G_r?rZȹJnMhW::% UNnPӣwA*ޭ'rZ85߶w2ѻ5!o\Ek;MR6[] RTwTn}Hd1ڶw0:ѻ5!o\EkTÉFcf4j PDԦvx K0ھQ=~cפ%(&*!'qB 4^1NF-<8A1N,Ac#>ㄈ'qB%=8Ak5 cЬ%O/NN'qBӋ@I3 cШ%hIN/N0f8K0P+dc0 ZPI@GYJq=,fQ7W?/ץѫt_b^(]:`h25TN F"tw >1QR N }֡5& $NGaK t鄎z jr \s*./i 1 Nc@+cȻ!->Ʒ+nBX[d S7VlWNd ˟oš5E#^ <Bȗ׌W{/ 7*1(ոxЎAbsXd~wEZZv_7ER:I=Pose2Pipds)rI `h3eӹ.98/tլ6id9A7W8[=*(rJSy~]uUp {qГG^rCyk?t&GKk{ 4+U ́_:E 43'DR\~02  vyRY߀|+Wxܚy9O崇p>|-B?_O۫{|R)OZڻO.3 އXR?XY^eqp:|[G>=H4_~Zn7W՟K~xpgDߢg_j]<|EL^ỏ"\Nft_k+I2G&IϊRD% "F@I=ʹ4ZjPCrENyP;JjjHml?w7xn|]W%UDB6U‘ܨ3*\f@. F;L3@Q;#jRo͡eq0˿jJrc+{阥AҐIrj-\PJ 3TV=F\ꆑJ:WLrFOօAnE2+00)-hʲ@B(DNDNطAL*[?ͦwiY;fs9RPTAYwm-,.fTguPtW\ 7?c@}_Jd@t ,]I)='2{W(5BG"K\IZ ~ Rm -k(KQPbh1_Bobj庽ۖ:xŔδ\𳋭TQ] KX7NVJs.Kb:#"#L=r*+7eCu(?b}< ي-Sͩ-?oQ{Q 뎲G[2׿&@DΩ3FCvB^(~-~FWF*ٌ i9oẁBf.~!ɡܖ0&Dz߰28Wo+ʼp&7,h0ǵd>wFȲXV %&/5j; ڮI>s> D[j]`z&[mC;<5Ug[p5vCD\ Ŕn(Něԧ)9u&,:X'QU.bQf2I_C}% HNr;%L D7VXrШ:Rg6Uo<h- 5!yio?mra~z:~ׇ݅OOz*?Z el%4d?gV&~]!LM  ĪQ?$-pu5< !V?segw_?ss|> X&r80c~ѩ(gl>XR8c&Q̹鞭3ʝi# a mR$ZvnqY q!><'yȄ]94"'@YbyOhP׍.ώ.RA^Iܗ\q"+wj%f .tT8,ܶ2~:Qn4̔Z-*)hM]K$ BŮxRd1CynK1Z ζ o7T'}[E3 Oxԭo?x+]WE~|]Zon|$5 DqvkFPdQU(QánɁhC9ߴ,IJ.CjY Mtwb-]!]Ar! ҖS N-dBU59%Ω,%"`4"J`qwKbɣsX"C ЇO:tKa48ʈ%+YLTRQ 8x0 fmb@5^(EpytﴆFDu$Ú"pjڣ18ɸ~> ֯GS%1? 4yb~}ZC&q71Ix2&(-asBWi.; 2E ε9jV֧.woC8~"RňUn@a\U3$8 k$NyNDGb+dO$R& F 09,*Th꧖ed@ CԜ;*K_`+˪invluٌ^ C%N Dkuݎ1ĮH'%6w;e%5rB=UN#۾sHlhtC -7QYjh)qS@V $ < Jh7=hBPq6y(4 D=j 2/P͞~c*rb\Ϗ׊X3+ʼp KR[/H$J+ \aKZ35h3ƛKS2),ɜ't: :(wzH9B9`&0)Ke`h6:p+e ƝABFt)>W͝*[Un{`ʮ_Ty*4s4WdgmltmQ%ɍ +ǫѻbipKB_jt%r\΅Y=[TL6Nis`X1[#.k~ȝe'I710A"F@b N!YK `IPJKN c%uGV]5)5 !Pb/ʃZb)MynQj{qAEx.W㍞@ Nf E\w `s΁a"$y< b~,lԂOuEau}(=Qugn@x)DC[ٓmK2S=?Jb\t; 9B݉1 ¸$V&HE|A-hgh/0ra!C9Όoe:*=~jP>`)9݁Cz/>OZ?[i~nTN_#=я;Kg7!9aj2>?7 }y>؏~ DTxst Ǥ~?y&5+NK@S$ g vƥ ig `)󃰒6vG̣3g/ re K-k6+uQv$FnXת[x&.<}pۃ V7L%}Gԓ8h}bpX!Y,^,*OvcNniTW'<O]xi%(}轓\r\;`e?qO.M,cإrqg o.O֜ ~@ha5b((T,Q z=X=uSqn<e 1>:3uxٱm(jA2XopB}qbQ;^/|(_=~a!Dv ·4(U T!bpwn٪ѧ'`j6ûW`U-gmțG2ߖgHr-; ;OjnhxQONiFwcP.{wb΢8GA0(B#Ϟ%\5@I@q)8mwvaRgjuBAu6FLV ~̬SL0k=7׸]Mp'8@:Ku $U*24(U)J1dXi,AN(<6 j[*k7yOilfvYSP1 }5peV/kDqʵ@ M P%+NeBҔBז/`4E$ yKjLaVE>:Ԙ)>zTpXJYKn>k!"gh?E)hk` :Ct~ SBRCW&}Sykj6mVO޼X\OY.k9fi\|98ݭi1د|ʏJ(2BRqdI.(&0 &X*$ {k;QB(ѠJJ_oSEcA zE*"~8Z2qS{n6(Ot蟺& jx_R`"/F;?kF+/BT㲈Bz4Ua3T۫\0-CSYcYCW2oYV~1c6}FjYcÎف %[ǀ3z*eRzg|"OvhU{R(pUkɰFc#P~!pGH_tǁ]*׈) ͹~+@ $'A{y^~ j#O憎UI ?5ӠիYC8oj1BνW nLDV5hEpʙ"Db :QQym@9 HeR5 D%vcIHkC,kwbq됖'/l!Ĩp(Er? 
X**X<&B-c$4^|es0ᒊnR ꣜i3Eǭv潆y?$!T$H' Y]u9}dN(Zmx›u͡­@A)@DXBlid;Vb%ERR[a6 ZH1RԂCjf־ń)$1}$Ra3mvB!HB $b(s3(u6%J(N|"CꠅI0ּ,Dp^P5[xZY67Ҝ` BS찒H8R,IR +ﻃYBK1Syz;4)`P4H쏐]i !&bl}Sb|>w_5 ^m  |IVg19).! }5 sH DrxpQOcEwx<=/h\>o7G?N/h}45_dYynݷˉͯQb+Z^UOdǵk>62LW4;y'N;cӮAjݮw=ON-Ƴ(4R7+d{| _쉵JA#0D0m#E)h*cK.صڑhw3.CZvo߆[ʏ_K-ȦJ OjO\26Gkeu} Ko-\'oBH@ 3vcdB7zf52ٗrH" h$Yliw] PIJH[5~/Ʊia!JsXF[ekЎ */j(zGNƮ2 @Hqxgm߇nmVrzݼ6'N0MD֝P,0 l8V¡ NVo$?8zhjR;.T6zOeAcC0"(G2I %'z*{PuiF#ME,)f#Cj wftt#x1jy~ }Fhc:xZXCFhSqהݏN 5,IZo6R:2RZ|fY=üX b"U7րRvD֘.yu@0i ]јr&tp-j*`1ê/{䶭J/g e/S5[Jmuy9)mm498_f S=׍Fw}x5*9&LJ HPrsJdb)$I'BH)j*tH~y23m6'P)cR)*E %KR) ORPJfq2ʀ( kRO"*R͋ZB Da!2K!H\ %BfR``HQi)1XĔ*@Ve_7,˿.G SJ!jdH|i|,#(#sRưa1(-\/hsͪf{Yz5"Vzd^3- Ä(c{izLVu^ի`\7zO eeg(οgQ1Ϣ,<|^B>%ԡDDJО5K5_X۱h\n؆Ki6UsJ .X=XVbƇ]`.4H)e,6 mk ({E8B S)3I4HVcacz-6¡)_G`g48*̌q1ص-oV]1HΌ7k]sycxgsx;c_6~%6l(]ow4dŇ}&P ߢovYN2.^[.O~4jrx;FiF} ~|=6Jf\my~+LD`S׈9kc2ۭ:=05 ƣxbtLE-Xf,;,1 'G㑉' aY EcB/ߒdEWNƞW^щw>c8kǏK}\ؘҿθ]ֽ ~E/d /v[ qsk[gNCl2Rzqi m2wSI zӅB;4Qz7a|O~g]ƙ;j/k^;Y8moeWRYyrMmx:šCk4lkG$4?]_woÍsI0bddbMK3*iPȫdmR s'>v[ NՊ zHΞjt|jńph<4S+"V^Z FAPYV4iUܶŸ[Y%q%S W'JAh |'PN!OH j `J*G}ZRaWت[P6 [ ~`! FtbH8Qq$Ă$Q\Mq8UY, \ C\B%y@l8-E$ ȟʥ~n0Pez9aJd9C5b՘ B׮K4k}0xzv-ԟ/'"@=-"7P]s Z?ov;WЉ !`bik.Lr$n{_k/_ N^ 1GMЄŝjJmkwy>1܉F6Ș HgҹܧY8+YTԱsPe;ne%TMjHEl2ύ@-? ڱ Ӿ~;%^N11P\Ue]pxrB6֡\~atAXd@Pu|MesygI+RyA4>DR/6xGk)Õ^|(V9h S =pni?x!\ 8mo 7 rӽPPó]TI) 6.rz񡠤NBv1rVtkحˍO-9ƦJW:vxT/ V(Keֳt4XټIW0FnmX*E;6 .Wq0Mي$E4HdWfuWB-/ |G)Z.)oPj $E4D8}92 - }G O?)%iluOV\ɔ?x;nsE脾vk0T8{nJ<[r"$S F I0hM+a$vP͝??UNH#9`7~[ҥ : PJ0ONꃟ0 UJa3Ot%,YoOr% POt%goOJWa='R;?[R HjjBz(+ZC !O Y. @}PՁN/Z"Jw)Zgy鱸-` 6tV`K*AxPƘɊ}9" ʥw0^U A'@M501})ϸFߦIdgґk<_4=-RmE.݌֏OK!ǐBxy"6}Jq3IH"J (uMQ*:iE{|Y~4m`\ FeFɁGV,0D`/uhCINl*}hxG9#OXbG{O~ïbkZuly6H[mq~5}?P@d~8o>>fƳy2:̥d[bk*k!! JE{_iy˓T{=a pBM,BcӍ38J% 8zӍo:dQ)1-<<!i;+Q;$8>GO"XBTLbfDw_݌g=_Yqnܤ^O첶B\Ƒ7c3E$W 1Z4Iy")FA 5(cB0J c`(&伃{z@Ǩ&qP%sg^фKJ br[%IO(ĘO<ܲ~o,`LI]}ɽtS'P)chԉJ)V)R\-Y?')a(DqF8A Ar G&șsy N xb~1ayYi=ӱ~=,?S>8Jm)r;~9^MWW+^m}8y9y1Uf&8>R1N3(aF֍YP$2Nֶ?r!FOـE޼)c6fe ك³%_=ݦ7]EE:NjildӨxkx SV}wu }tO26o{*,B1Q'Uh01 1ȨMȽbc !ܜ&Os늮 j^R`k6jkHbf64t=gd??ɇe#8d =Ja\mda "2My4\)"1T`6BqfVwP/Е|$Nl}_Q;5֏wt~[-b{ɯ^|fm덹y9{92~xbJ }-zfy`ڞ훏~ޛ=$#XpZo_O?X˂Ggd M/Jbyڪ*mQJ%׀i+)W8{}/ NL$c׉jIi&/DL@JHHB&'r.LH Q!.鹱 qɰH~{D2'%6׾ w@'&"D;D$ˣ0, R 8R&":.kRLnVt^q̛Kr%b&Y(l:Ϣ!dYlBɖn:өCLC15GgӢHeun π(')s=Q*^DžMH֚eD HE ׀1TT<~|/q|^PbOvE jͲM8so+{;:S腺b[ukA {PrʏgW1ܸfش d ƴ9<9EתIΔ?n yPּKj%z[ '=Я{ȋ?'cMvow\5gw31|#8JCʂbHgr&eG]V:iM8H"-%( J 4R$~Jg+o|.xO-,Oi 61lJri@lMU=Q/~pԙzLO u?c=ߟG實,l&Ňsw_PރGo[i_5AA`yc8$88졨!+f\`uuRʰAtX{9+n0tPTU|ΗjrjsMZ!o>5h"n}- Ik<`+?sl a[8eW*+p#yi y8g )BB@h۰r܉+ h9ITqIXUxW+zWm8\ J=AkqF/\N {,6{6<$IGIE+4RKjǶfU,VU>"{ήiH+h~e++Pϱ:iE/Cp##=UqxJ 1)AFST.Q. eE/=GK8StS)'BGUT7.-$g^[,ay~CXs^[(UN &bBIaJ6M}Veh|Xw.+-b_-b_%4>EHQki޼V6p<׉S!"Sn\8' 5[*tt I zeڹ 0X9Q$q`ӄ2ڄ w\ؘ`íńa9KBF)1pF(/@M913 捣ʦ*"hAHSGB; +T nSI$5<A`m$n'nL좒V ۹jݔ2GedrS'i] J!K9zL&"S@P+4J v`0sd09,a:xbpjaIf)G(1Hz(nI~G0j¯la>~5lb/ @|3_,[w|wd|KGw$#ȣz=}7`u&y3xy m(8D{ί.̓lɏ'0ᆢ^A6?do3%d!5pӝ绯 /H 0$.ԻX)0,~1M|<[24xE~0.+?/ 8E"pX-0,?+w;U`--YjO'KR~YZ6Ӆl\"p GDb[TIYR}JAs([p0כR|1ajn:pe4~XG)CRdfWTF:C C>{$W}v5yS]t$ђӾbL׶8~K'gc7.ϠΞ6`qЀroFcqz[-纣mؾa6f|;l41Λgb?/AC.. 
;I067.>Fh+| +"l}(&?N2;oKJd8#hW႕_j;8 FRrTMT-/ۤ]3-ND{'=x(L駚ruF!bင]v,'=Z|T4!Yr2ah'%j\w0-r)MQSLBƖaj演yʼ{ Vv&ix׊;P~o8Xkp ԦS e|l*C5h,"Cq$!0o>"O-n,LD&l+|5+[ SxIOtU}h-Yf4+|`ur ]gjx$^ foFߡdnCe Aq+ 2\=r6FH)vF6(c5QNk/ h [ƊmF"`Sd@IhٮJhM<ҽn/H)ў/)Kn[=>>_* = 0Ctb z0|>Tc UF/V]ty*]Wna0{{*ќ.vTv~8B(~Y"32Gj:9Gl m[QĹ~qըZhC"ޮ)w1.dp{o&oG դws;?]0iȖܿ]ZW߆w_ q:ZqKV^n[έO-J"Msr/^oM//~I!!kCu,!{YVܺu;WQg5oepg,q\,eFJLgȝlm>wih$˟ W1nL[un)y#;ͣ9r$8o>9r}+X~<("G#r&1ԀI[F+㍂@׌Smc7%}F4rJij6p䵓I,l9k!FWS;l` Iyj^`_Q]w} eNCC ^ȉDRHU|8f4 .\欭i5bH乛&*)X㩖8$qFGM5Grom_߾mx.ގf g)/O6`kʴ犎\ioyG+t:4|h[&}ɓ`o| OVI c)`'gR>6/r|)Hʏb'45e.ɗplͰ 3X_'E#*Z .s'gPScqot`R2ulSp°@\U+ĉ$>y<h|?@x]/6B3y.GIڽ65n,Eն~Q3.ZD%h':d~ e2zG:[ל fA(ғ'=)dχ)hi(]w֣CE=#.ŁWR. ȉ =?d*Ő=,SCa>e04NT)JNǾ iS ))]/pDXJ).l^~D8LjVCp%~`Uȏ4Ǖ|@Z릐!!7} \|D_r)\}mhJ\%ļ9izO?'x~ø8jo# Rz*Jʚ{5XBPW91%X uuF&@=_+5cK9QQ x0ȳDMC7d]wGrB_NJuKa[9#8jԥ~͙9K5oa5{qΐڠpϧs@UaZŻF[4W|nsqn AH|)Sm!T+X&qio.NPQmS!]>őE(r  bq[=h (ִj%6\euIffԀv,|GHz:nK)V~^3g_BBhCMx Єev2HhiISIND%)FT!2iq6L 0d2ʪںKc_t 5 l:>{5e쀹}sEhf:r]KWuQ R\vUhBOF/UqxJ Ρ/)AFS/A ]R e;=ǮV\,1TܞEG1&m/W k]0bd%ltVqL"hJ9fʁM)ǹ]s^;Tk;HϷ`e&l$ 43Nva|f¿hYkh ~nے{P>}ǂ0йi^٬HS saEك gOiE 1E3]hh{k!=g9VNWE4J?unG -2*t 殩kZ1SR!!r)%%onHsGik"6բ@ƀq{N2]zOUQ3%|Nj$#aLFR׌HpbdeSNYwI_"RRe,njdKZ%rU'̩hWBRLZO~tB1]E=>sxpLū RXt0]V!sR,)/D^Y0Z8U)>l_"5x߿X>XGY$/'_ g(nQ{"\//+mr3՜K㎔LST++*B5UꏔgGMu?o(@[W[QZ[?lKC9tuٯe6?5 ~EzA'=g&\YZ!I~uA1<\z'\~)D棘>,DwEىɛ<wFca'o56ǻg3y$7W߄-p|w7b ^ a}{ѴRNT!~kNa]FI4)II$^(+4LcxДi tNp^d$ rrY @&B<1*lTJvkO4IMQ8I, <¦)"J{Df>SD[e3L:Ч^;H(uNs+ " HWN]7dh2:V![H/6˛PjycD3ZBa>B@`ma<)tTwE{ޕsaQSNS.D+.MI',1L+P̰¢(֠Ys[^9uC0Q4|&Wknl  #B\9Pl mgfz;'X JTuT/sI2i:`x͠9;V3Ĭ%>=Z#+#abgl܇Mr,!E6ɡ35_lIt^] ^+:憢$E&iSZRd:ϙ88A9( B$T!EaroU10FpbtS=e1V3[e+ر $ʵfI( "lU]r3,$SBq.x%B CRk LB2d " [jf=R I8b!p넁QtGγr{‰]o32rl 7 L IgJ9P(o~"*tqE-rǝC!M%o T#P#%S_3x9p.x 4n/\3b7xcYCZ>dY/r3~6lRMZƟ%񺾫z3l6ZGCyZ|_oFjWZ xcnkelr`O`r]"BI:CL?sXw^u]}wibrzVokٝQهsÇVgۯ)ݤM&<G#is­e-RjQAiRĒh* ˱"Us y}0G a?YQk=Ҋ}>Gꓳ]sɁ7gF!rN9gj*-X;bJRB hyR. LAf1W(Ns ǩua§w~"f°RN鎒E<5O)iᩘK9Fq= `w^8d('jE 7a7PM_-M+c̅\0&krN‘ >F缏g0..gB(ӘԎ;BnYD}s׃o,;-9 i~ߚw OWЖyUjrw^V?׋ ^+BSBnL%o~<;~m]b>sSEoL9(諵F܈?iiΰkv f|+~g,_#ėG*66^3he87$5`0u~|lP<724C9,3O'o?Uj}Xr ?eNjIc~2> g{usZ$2_-?ܷ_&)&$>pTfOc+?o/ -~&)%DhtBpz<^2AensX7 M7~ӭ tVq vnAmi>?B 'ogg`d3ߦlNB$09K;'f4%:hm\o. `sϘZ^2ʍm '8Zz$^ѷ#8_J;xBvI;͗m7W'i5[ܷ7ٕ#[8lN׊?Ny.[+xIuOݛ9DȦ{ȷ`^ܯk@oyi{&u? ֯냲~m%CCDZC7 $MD5lk1HOCBnZLt{lqf`Ԋ;7{AQJ %68>szo}<%%9TţӭbQqDX*,% MڳcbINXCa{x,< ?jNLe|}QRr,:Oq.'q1`ʕAb+#v|xH,#v0F1Zm/w 9(-ܩrN5T DY @ԓ%"g*8kb%QȜTT &R;AHi2i꽈$%?QOL_$O/.Za.wi1MKQnrw%/w[tHDe ^͚a h iBrONGOa6/A%8*(U`ΕAi a5mXITL  /YfDPˏ "u(3S kSL>xZ[ʶQvb6M*Dznٮ,۔33Y?10Œ ?z(}!f._qLД̕ԫRھa#5)%`֒Dt=ŮiU'C9zՎp2X)P/.:ҭshj%h[pn!8䙳OqJ7_OǬAw=0#P¤Egm@y牁0|G]nhqww !ϜER[q~DjGQQ׈`q'aD#ORc9ĺ.ɁOuºXŲ:h bz'o PBTՍk.[.:B| fbQ9g1`%ŷ~Y:]uzp'W|V Jn6ߑηY7OwnuE54 r"3g!CjxWuW nwLJmݐEBexKUsHcFyv}EHE'p+G=w/9ТƔ+†%UQRTPZa$OZ:Nju_& =\Db_ԙ8\gcAnO^`f7ciXѳ7 A`%Bu  `a@#A 239I8jp۰yN?ذLؼŭlBa~ e -c'*SIukITR`XCqQ -a K8a3SEkN(SJsrkiN.ߴ2Al9j*]NUp 9hLn |S3;5A-Wk|; ʸgApg9yݺq>ACf.^Kh+f.z+2wJ9Ϝ"Zxg&Ol%t ϥK_}V240jc}Qk%:sVΩX=dv/zl~B1nN\,=|9=<8C!ݰdw$hi"AwAc8yr2 }@[ϧV=9+g=!a'6zq6Ju!~3H|HɬD;_ \jN9@S+X(Gr'L/̚2==t'Pyq0k0~#ҋ vh' ˳(jF;!N ˳(0 awFNi;N`\N턠;A4ՎRhF;!NЈ^1G;!Np1vN;n(1vFvh' Bf;$E&iS 'tLIs>Wq%L*DP"-`N`q5:uyNF3Eemkm AAho-^}'9[A 輌fhoQ/G#p*M*.~VA .h[D3-hA4WvBv/))~U{}2?tq6ǟRg xfY/_5̾xi4,9=l1X3mi!3o?[:^楽H! 
i1MK;1,Jy-\L$Bc)~^,bHQD$*A$ laG&zYHyF)}mG7^AEBy*EFe(b( bDrPu6^N,9lGP{3S7aFDOUo+0pE ҋj6 RРb˩9:*VG=R*Hă %Q5yLYE)ъ)Gvb6ѼH}ޗ,&jSrVA=QEh5p⢱+dJ^`!mb!2^E(mMs9kh# 8}C+ECm^5ee^tfBe򃉭`"YT߿edS.1ǨzW766|f:=c;kZ?[N{%eM>}}vF Se+{/W W_ouQj=.g ,(RI/p(;]W9 9{eQ$(MGXK(QA6Z _w~'z^v}uS@.oHGz^-Tb#;ύ7zHҡZ/M'W_ XLcUߟj1=BZYbѤ:Dңt&-!=KGPۭQCC=ZFZ~4*dk]Qr~?,o;"aP`GuaiRAxOn}Hl%,8_#3C`?7W{tje55 s@H,edqU<6esY lu˫/f6Mn1 @7.#lxFQf #2%y*"N`D%)]FxHŻ%>xB,+n-e|*sYH"L Ӏq9M0ViFx@jŻe?x,^kxn`[)s>m ,~p"A׈1lu W*ewQLeh+k DLpn WE5ɊOgKdnA5y,I郪[\L|‚ڑ'-WEhy-2<vo,YۼXJJYDQ?P#yP(<:Uvyp[)1|>mNLBQkmV/&iqp $h/9H.V KjNnWz\jp,c8p ,_JH8?cv;6ܘRޒ]mF2B=C/17ߚ)1۴>&#Pڹy1d5N-@mCjUSv$\.48ja/^躓e58V5)/Dx\HгN%l3/*/4Åk!2^Ϗ?黫ӏcLk$.eGOrNb!|. *b!BYJ%Rf">I"Qxu^)GjU8G<[y!m&Fc2&2R#-(`7X-D>1~ڌE-))RXw~\)C5z'a_㏈#O1'_{O>xLӃ AǃhE{x8_w=bDRǔ {*ZEcQeQfS—`x>\:cļZ!N]u>{;26OF]E l8*`iƱK]2n7yGofAfћx L1fYQᙅwRL,ˬBS73$Վ&LxW}Q%l.Wf..w{h$[Nn[>H.4 O8x"04È9r͉LsFӷwFUXPLJj٫uĹmnXywjΐ[nSU3SK p8TibeqP@V{PV|3L!x r_sKRKYko06\*f$H0x0pXVe8I GBd $&&žluguJbJ*s+I(dΘ[sUUҗX+W9id0^#D >p*m5L3$(GMc-@SLͣS <ٷEA WMl-}`0ks0;k[$-]>p] iOIΔOTa9&h,?Yⲕ欅oB?[7@!N՛ww_ǻӍ *0 *XbMpodl$@**P ٧kϦ +t7>W]ɶX8X_R8d7;*ppQo1` e~jJ#x.1K),T+Ļ犇㌬qu;nyWZ I>sɛb6>bkiv\ZZ\fA O?xAb0NߓdoWo=s{ CWl =?~}dh8ݗJO^O}Hadbz!-9ƛqEBT[x ϊIaEf}fH3/CCP@2)pyG6nǀ"To p<>Xۚ_ P.՚:;l>QӘ.U^aTST—}M B#q=T4[cLqSl} ™RL4X5،`BҁkLs+_w@_'ksMv݌(o`{ ('T!zk]c*6TCm>\ Rs̏Jɓi|,I8&GCvqB `Hp)*Ngdb͋O_OQDQDQDQDE-֓Q8DWԜ0rA=$W2.8)jEr=/TOh)9֖$z}OJ?YTAI/8h\,\E!P&ZyżF>U([TTa IX$XYW)g*K[J랑4"$E}I5yǣX`{M0|.m "SF7Zk%(ǧ \&)58},XKÙ|4r~hd;8tq95,c#ZQs׵C\EW${Aqia)R@ N8N8V>7WtIhVAFc2G#]4=*+Ֆ,8ej"AB8_]ytU ms/1x6X!0?otRoII JvPN 3u-+̊ʧZ2&ɶfM"*-:oVbQ&QyW(V&tč)Mjd؄aE25EsXebb&ǖiYy?MtȖZ6Jp<U%\R!ÚU\fGIBcZKᘎ&(6'u>`Ǵ@=!H82C]O߽E2[![#*@5xqtY,[/>0 fZ#Z4x= 8To}, _KP'F~~$#=4KLpM :I06l2?\N3VnTE-=8c8CgP"Q;+W\z4-3;4G/'GYhn?QQ0^fMNR)hDiͭ>L&F7ɯ%H(Ƥu Els[VUO(pu2$z,gF=,ӆbDŽ=^`Vq/F'\뉥G3PK.BgB(y@6g.|N2)LN+?%/:z48B\Kz) "?fJ̖VhI[tp)mHyֈQ޶6gupmA K66 4>%3'+H;ieW :yW[TeKgкn_<4KD[:N:"l _늖"J;}u; >.8A8\6㡔8ZCF(hښmļe@%nߞKt "պ%幄G3O~͆_Y{bU.j^+k|8>1\|!K6z(BV# ~ibS'Ra)~V׽R1)߁>8N"`NbK)ɦHD}+~i*3ɏ*&q @5fLLW+Wuн7οna۸r{<C9=j;h;wlzO;7՛7>JIx3|^uu~UxtXr67/гwٷ*LjZwc>Gp*#Y=ͪu\f3]€Y/Sdm nT1Z@Q|*CRx}0OA bD!>]Ukѻ0/PwBף;psSfV'qlðt`9^2!A#,2]/]SuhM:b:)jùO\ 3Avb$w6{̀|s;ruB-֙6c>د%Ne.lwn׻&(օc̅3cxuctx!-YJn>>|Ln/p> 't-<]$`%gw r'<+,L>wO |xkYͽ|gid],YcN$R] [FUX@D@DEׂI|wuS\srHQ3fYPoPiI-<)&``^`sQWe*Pцg֖Gh]> |㘂Vk!T!Hbce 1Zx /݂Z]pn>2etMѷlH3N$QXt!mG%c88P5U_jB*}TĊZ4F;GHaz2+K' |Is{~ˌ ȉq1.QJ!h-E{:1ӓaG1%qcz쎢't{r[I]Kcę^ƐLc{Xk2'-CaB)vLߌդgm7FK\hw~p0-?/e ? pOVaKù2[fkyu6]-L%U:)%s$-'Z/CJ*f]['xѹF^FX.]K|=:B+RE:g)t>:hH:EI9OڧP2- 7py m:o-*ˢy*çc'9>e`{3 RI*KFg@;D#V _7 IW"BgoConiYT}L΍%_3 bb$K[&S8DXH[zFq::%?|Tzʾy֬S^?qbGA^ٯQ ]G pZ(?,CD䎰㸯x&$c=o޴۽ڴ,{Fkl?oScGܜ> ?M>l^`, /"\T:U%ѨE^j>I*gx*~? (:)xD g^,5OHn`*鷭p0!,sS[};˴Q 8B*#I͚c!y*kָOeoIή^2Qk2L>?캤NǏq֢ʈd͏x:dIGlg_l+ZD;jQ/Q*N>j'}\ƄP~^? a?ϗ JJHXq B)%pmCZ>Xu{|30@W /H/#CA~>[44*^䓀Dis0tGh]$[kI_SDd_wrVU CLHΌ3pSz!wR`/A\V>8zIo<|~s\=餎W7_v8oW]z`UߧOJ*FcdcEJ1ަ5,8r5cW~wL:?.忉OK/{/U }A9RIqNQbnSSJ ci& 6J CJFvឪ>d]z _. ?Yg[-hTdRj~Xm[V*^dm`]]&Zq)"gR8'!'J)cRk%8˄1K$щ ʚ=d rO Lvs~7j%?BJ\ҋ]Hyt-E[X%)5N=&FʼnƎsX; ctBDN^8 (Im{[=KK uۯ0`KMo7OِO8x ADDFyEO}Nt6_ EV8;`N1el?NP lU8sl9݇5$7^b4I{#τVa^FŻFW5 [Y@=fLz2ӿV-WlkU:(QfĈ֑̓ 8@ j/U{%eX NRDp'+pl#IRl2pe3se92ǀ.B ٯ^AT$L"C@HJ" (&E`J(N%ca_"!ͤV8H$hat6}GB':hkGh3jbP=1ɏz-p0lRal"変V1_tG$-Y: *[*МcHoԂ0:N՚&z3$ 0RNb]$IĴ@`8B %@2q4lg+1苝 q`O",֌! 
a) B?vEbb'E)?LjTD;`rzc8x>#eK<Ԛ>X;M .+ݛIokr}.|:諠s$֛jthDѱt+m)Z%Al bk}A׆/cK.md!K"o z" {,*(+"CvYx pMvP tο'.>: A^Qzּ1 >؂k@˯4 JԹ6G7;P`T诽-p|uu%@xw݇Kn2eg D3y~p0-?*$?@gC) ,sQQ>bUfíY8\JˆNw̗Ӹh$u VKM`퍠m4com<6Ϝam:\\.oڗVhm 99>WWz#2;ZߛgjxI2%lS?Xz](8Wb"cd(AF1Ѥ:&gk y[gjfP{`F{  g/Opȼ*'^8W sAP=h%XC KᶟW T0^ =jq8& }%5hT[e &\L@c*N2 KRDEzDdǭ RmC%B RT3%`&XbJpBSf$J\*TR4JJ(REe-v8N`,&q.MmB0BĖĩ 2d DqZr\JfltTRKf/̀(C.NaY }`E)AD3 %F2cApz>'uÔsi*@SQKm=#ULbB99 }ESBdgΨ(-ruݪ[ELQvqcILq%L_1Ÿ]τPJ`U*|ƛH~W0\;Y?yXZ2 CVF Tbt{/l6x=)'g+/He}9)U |ښ6U jzR eC\p4BU.GRAYz< T\ z,݈ $u(_"9>L^U^#I> !<(Y7W- 8fvjO"R'@sAm cג )W~DшH"&"&EV@nM=_(񡫵 Ho&滌5! >ID]֊şp2> 9IoЬfnӼk[CyOI:y|U]kxR1^^(Pm~SOOd0=U}mL948dQh3%ST(,09J9,zu6aZy#Sbmf5 Wi@khєj BkۍI"eXSkDv/`5eB^[r%Y]ɪDEd,T"#ω2 UQ)7%Q"ѳ*JG]-2mWi]KRȕE2RN+u3@)tQ hRg<靂(AmS8j \2!!c p%#em(-$)D j]`j:["`~Z']A[3ZԜ6JW1X<j !EDE0 sn ZF' )ÎN*9SVb|ԘgM-)۟4X~n@#O#cZ)`]D>O@=[Y5erչg7͵FBO.@pwwo'd0~4 9s$JfLTZP#喼H %e4IkQ1o#fO@]Ot<"ՀCbg&ڏ}Byb3  :9El:BqI& 2f5 q ހ}o_~WMoMuHL*);k8 q4$ǻX|*"oВҾE K"8K=&k (LVҧvh-T-_ZH#Y%֕˂8ϜGϹGS!469=,_R'Sqoet m:K)e0ۚQ6rBs͟ZS3t&=ߤ>eg.fۂQH'9TFn eNqn#G1&iyrCߵZ9Ꞣe\p4ݥ4d>aHL1 CwV<ٍ€+z0Lnݦ|9v9"r~Yg W,jOA nрQ@THDNjƬBcZ];բ4 ԂyjIxPR3R,9nZP PzZC$9+brIMָw<]h!"27{i jj,m@IWNKK[U?ykz(gu Lt;j)KEj1HeB@ä3%'!+- 4&vݮӅ]9+Y'~!p=g))Ni򁠡4,rm'u!AcjɚWu's`n/MeƟm}=,>a~>?? SHا}ql~iאlz! *«:ߟH4<߮n>8Mj焐smf^s뿾ܤk7ٖP(?3 T*OO }'7D)Rr^ijNA g!H8l4:Sy.ݧ!ض*Z .$ZF08A~Z3U74z*ha\?6l٤us$W&jX`ifN_xΨn\W Z4!1~j} L0mcRWKjV\OP*Zl hPo腈u$!O"m礞>+YBH5|2ruw>.~=0Z="ó&Dnҷ{395M?V1ȯd:j'?l|4Mu<<\$~nfWZjuD(ӠL%,ʆ N㯏';w5sC z x7j Ρ/||;\ OPsɝypzܟ__nj|Ƨ<sZDqh LU0WڄW ]bѽ#J߳U;7@#zS‡ĐjEE$%‰Hw?uݎUd@t J/ G!!#Dms"q^؊6eQ'"6{gyǫS@O0~qAh΄h<P ??K-/e&~q6rce!Ąo苚苚苚&;JJu2Q1REA{TG.vZr6\u_2Qf"tA?,*m%ܮv?,.jؕ/~xg޹y0nkםфE!Uomn97i/S<{2i HT,Vj=Z{#d] +7e~Rgh`u_ =3w_b8EE{>U>2ƎF樾C1r:Ũ1KvzrAԉ)H) .ԋLIC(3ހGtN3\LLPYJI?Ag( 8bD-{m͕BWsƒJʽrtAU 9]α 7/MPI1AF|1H +0IؒF뼽YJ"-G@ -i԰:nקAK#8$H`)A C,\eTiːԍVjGX) FUW$VIYi%-Y+(`CLsJLDf5q懈1"DhJ & t/ﳇ4ST6-gɎ~t|HZXI/ԛk [6%s?]"[M4-Zj5Z's?qMK:)>"%գR(Ҟ3#U[xҞsƱ{/%a[CN ~q*M[ SD:Rt/#3& xA_4~ ,yYλSF[= U'B$p !^PVWi+) YW[S9k{ KE9^wH^bpqR18\}uf%dҺ[d',Y7Y|ZQlbO.S6bOr G{*?{C޹v)H:/tcF+$RH}T~PIrZv1&i=Vh v,YNQ=rܢD{p%X ~D:麇N!qa8o8pJ^3pqčP$=%%zq4#j~!i''E^yQE?YTF,g9 Q }dyes9aq[t\vU郊J?X(4C rBKQ\j#4v8ܢ0 A[Mȅt'Q)uh5UNroa8Dj5i\RL=E{e\M%Ҷ Z %>C_ B&n >[\)eݮRr謭06s-5"c}kKaK![=0bn*.'$9p$n0blra[kbJQ{9-'VpF7$&{8i@{ 먍zB5(8Ȩ/[2`gAk6jVu5 )=W\uxE^q h* ֚eX Zf0yk<:=q*c1H4$-V!_*R*}xgf~1Gu$ Ixᄡ&ʨyFa:|™U8iQ56FR4 Z#ډgRGHޔ3s^f[/xxqVfY /WjBYLc24RZ+X ,MDkJE6paϾ\3f؈s2ty.=ຨT<)IAF ڃ }|{%ȵbP fB-^՗/. ZW(T>*M~*;jgiy @XdٻP{;e(8)n]A$SN󛂌1Kus핢2T`4BZr62DJaWi ʩ^9wضo|w(kyxbvseq^5X\lڮ2mWgsqJIfҡm)Y?.K:LK\T}m, [,o] ތ6n[xk&(#BB" r?>Lݐ^a@۽ C!- b}sER*V"kWD-x55y+z*«0.#>3i͇˘jwg^YahB ]<$0ma 9OՖgj?ΚρQBUkZ>͓"\OBF*{ʡF\uKf!Q#8rw lu8_"Y>|O0sJO$9Xݒ #')8T=q6 b"bdztHj5u<D&\;3@)+WQmIoRG*2eǪ*{⸀F}Wi{NPV: :hɉR[:xՙ3ʐ56 oDqcӖ-lyL޵6rcٿbJ N/XŅ"YLg}HɖKREm`v.Ϲ$/b hkًќb)Ym!E,, Aq'FYSʄI"3!΂Q!_@ N+YII¯)$Fj #cl袕`,* AH!-7H %ouNclr^ggY+cMk@||ʣWTܓRs(╥@dž 9"RE!82K?=sKà-1uIa\ת.Bpg39ӛ6L* ke%I鈧S^Wm Ns 0AUafęנ}\vQQIXn}KQTh!(OohݐT H\ű dP,㊀Q0=u\=Q9%2Ee-a#(-E١c? ˒p;(i\ףуD`|5*9-Kî-;l .|X7# 2jXs.gk>lWJ5q,dV?>M "B߆ neV].]rʀ}؞UqziAӃ^cmz>'ϸiayKx"\@Rq!^qN)@>~I^?=xlFG9"X T%w{ই'ъ.*L.JrHb?%s(TIQ%JaãS`r 0^Y)<=}|p#:%v*Z){pp,< H6ir E9}tgVڏكA:@aݢ^1QuzEEuH}|j^12H,ɜR!x7Ȋ/$͂ SoFӾW _#8?xi, NΣS0<USW4<`^cZ" 2Vhކe MOoÎZҡE6қ[x2VO?P0rJ?Z>mI]^'R'q|2#bFOμhbᝍ]X+D|ߞVH+woj]2)"}߯A7*~_^ݺՏB$w8 W]ͭbOcCyfn?gU2IVhy+P-kTG<䕻hO)&?n 1Hrodn.S݊ӛRG<䕻hOqEeg C̽ Bkc@4/T'I<:}pq  y. 
Mja'M9}'ȅʵ'RҞx>3ymItlKԊL~h  oN߀*2RJoK\*8\m׷~Dp'S,1]צxPT&^[ To_ԫd]Χ y1Ʒ2>- $BMp&ZOU{y別Th7IG,ދP9鍑+D1_ ?YK)q-WS }@">g^݅۽cӻUMKE~zwFZ"7_W/x7٭g݇>6~2, a.~j=.2Ag .yYuؿt}XtM"^R{r߿5/r:V|wvstE̛wzwե9y%q}>9և5m=1p't1CTLĹVMb_9?X-Y/p4qD2x) F&Yaj7/=i/e3!ݭ3Z P^p?'sCP!vcghmT@B'"[͸FJѽ<{`1ה1< b(cV)F MܘFapNOsʋr9˻`y<8tssc9a66✓}0_/6x3,S;5XΌxfdzVk')ߩh^ HW샗Kq+{_-7zn/bt7ˎ{K(6rzmK@e*Bj]{!X"=L;VDU% [#"rt(#X`sʪgjA@(KK$ú)aw* Pyg+'+K=2X% lÛ5Rh+`L؜0J(\Ev-(ӫ-lT jsm{f"97K}5n)̻4 zx7t$[1e9ýA~t$ _Mi~ak-#K%Tl(3ce4$O[[ݼr2js[OuZ7?P8C㤇7\ *kg1ʐH|N/h%]TacIE0,՟gKr!b sk ՞g J3=:w{6 MDVr[ {pp,<ߺj la` YPWD#|&rL={cWp+& + 0yx(pٽp;~{k=HNzl295L+DZBVq sk;"XL=*gRZjZɭ<+) P gc<) Cފ,s#%_uI 0+,8QQt5Ri Z2YaBD2b +i˼AN)h6Nϰr{(صn/VMOzY ÃǃHQR R~*9 @uXK{zn5~Bos"ծn?ǸCVc<+#s5*ׂ#y+w-*[{Oql\*ޱPsZHPsZ3JZ3˧UD:"J];b4QٓA0 52#j0F:Qa6>F* EF(^#%uum%s@Ax(WQA߉Z@ wjE0S*xX(4,@p:>Hp©⼑H>Ip,qX0S&ڝ  YMt%>s`2Fږٛ]0/#[ǰH.gH;hW0h-Lv" 8\09yb;oRJ=1*NY6s,5U* iy?I2\U-({b+"'B Jh.3!HC_ CSH(SNhp-3CBYlMme#%x !Ieyci x[u0۩HrCGXۮfg<"-UV pLqVwK(Bo;]Pk`K_) ,HmbAP+20iG^{=l:,F_`S4a+zEQN JM͉⤶8i3y2g'B ))YKwL{|?1[/@n|Vb" I%NS a WKt;$`k2U\ߖ闦`nƤ!EӓfJڵ&Oiq)C+QN3f_U'3E5Ȟ2ԎZsӞxHIKxHIKz-%emZVhblmVOCP+kogN(2Md0Al8dӃlFWJ=J&Tۍ{ޓ= :)5+F+&cD]H HbO#Fl:- ~2آrWƙٴy0!Ý^Qt +. i{`gо|B9Z[=2QqA-d@TIƺV~{Z N Q1_K`SDYms 8=2p%܋k&0[a -vn^EkAxVB#e6oPd4 %H`GV[kN;V"$I˭Vew?}vø/F_SKF?.U2Dl<򝄑M|lx_~vSGFˋUR7!idSĎTڠ}BSk;O)`dmKIF'1簈B$ Z=: ֕z ]|A9k`㉌U'QxXMƿ L8N:bC+Tw5&ue82B`};.Eɡ&rᰞ}ܒ}9W;k['f͖;cxybAַcŲ~8Ѫ`CCbo-݂Qv$Q7wCP"SP- Ef9JmGNk>V-WHTC^S=`0e48b:;4 %Grѭ y.ZSjxʷ?SJ1Hqonʨ?0V~&qtC^n=VA~#gtq? W\},qtC^Sa&]gQynqD[^_nܭvGJ3 dtT'0iɚF W2oK_IB~O4PgdH?2)"5N !$N|`8ȄIA4t8wsʮ #=o-o) 8!65+= 2ӦggG⃣4>81P{,k'6+g"}0X_˧_|'^ 2f}M/x3-bޟ7<ࡶsߢLn]Vۿػ8n$k& ]r$"_|ںHN3MrWI=/ {ӲaU,x57֭#O?k+FU//yоg'Ֆ>zAٮ ܸneW*G!~E On9ih`Hq2|aB쯏b(sYx#G/̈́_̮||glrh uY.]flKjnYeŭdI4D9C6Ÿ^ST8"<?ޞϡj@ڵ/z{ 7Bi67֮Bˏa%9KtώJ?,=Okq,(H!r*nR+Lf6K绳g%z,7阕gߝs}ެ^&oV>?z*Ŋ &r2I" %(0P!6ټ;!\pjd|~|};_E;c󏿭DU778Y*]niyx@9ȑi/%maq h #SZU^gLYωQ4D%s!*s%C%PsLS~E$d-RW2ȵ 97g|_f\7C:_ǙiO7_<COb>f|Rq ML |sw~\ mX{A$%8<Ǵmͦț&n\Ӯs7PӮfsK2˵,ᕔ W$n DnUL2RXMIH\,QfAz]-GCekp(ECEXb@ZHQyTQoe U2pZ{QR nj+ we{X=%H-\]_ :g3|Z1sۙƣpBt'Fȿu[㇫&gm횋䬷oXr@1J HSrxhfQ;)@Qh&QKSF/'bul,T^)g"%3~8#s5+iZ>xJ=qx_"cʐko|un{=SY3wS {==jWS=]ӫ{܉[{Y{.o*|P_Vn<7:7 Eⱖ۱jt ޫ⃁)ҋts^l 8t.fkSk")8+Z.P0Li )5W,$ݖa-WK*1dַe} _/ֹ˘2IQX` e@Z!e8wZ8hV[)7ԁ^ε>$2 wBK -}A,Q:?(ћ$W))$'*F0vѸ?. }~edyy B~"~=}jA218bo]ț;l5|w}sFl?/;SaO>y*4\ "Ugv6pyfABr)&onk7ي;vŠDtjEG\9Ckꉖj>$+*rv3d-cj1(wtn' U"9)(E=v*)_R(ٹ xAYl̆'5eD*GcC;`S,%#f0yEa_}D&IZy^ze{k e14)xm|P2gdp1O1S&y틪j׾|C>hXSu⯦BGqJxw†ɱ>n_K.BP}5T Er`/ ʓFamҖD\Z%w(x UԨLI iASpEYФ Qt=ً+GEG8H=[9פC  %BafwCS,]igg))edh6HB.pʦC)Gn/q5hlQ5`r7/.Yui9cOZ7jvš5!+_rdzeGUaȢAs!` ٍ(-[1׷:+r.+gН8- JUqg:Nij;t)@]*wl~{ 7q1~*;ǟ0h'튊#.qG7owGhjx׳ŅOdRXGY+;CԖ GmF>A IpV('v-]sJ9>yH9yzgΗ _2D/{G5LϴEyǤ!7z2~?E͡?:0x$OX~$> ʨՎ/rf>V\ a=0Ȑ0ިC5Ʃ.EKw>ExՁ9DžTxlӣ3p7;3?G8ʳ:3Y bd#2RIXxcyḺ, S|ZwLi8{:-~8[h.-|ϡo̲5bꢷgqQd<$ Y=3UYxe!@0F) vfϨPy1\b~N N]S: E޽£t.@QhH5E5j3h.=r74]V ˵"M\jkqvKDQ,)mܲ e+ˤQm2\ENj۹v׺η5TkçekpiIMumK(6iTʂ&aATɰ!^|>vnkT3ގ.J91hUM ZҗvI[GjZ%O"seѭE$2cFl_k\DkdMk6 VATv;%[uRփ|"Z%S][u SlF5&՚Vb?˜0p^3yͲɗ ?7'UsrC!ZL/гp%ߎJꄴazB9 @O$ӰLI˄pIb-h qO<=2FXX%u[B=A{I)EXrcq(=U[p5R G)!E"{ɯ5eɣrFiGj-XBbVd(Ν@}4թ? ėSNqqŸ'\|5"Z/^`dBLK(0kN0tbC$PLQ/A n;D'O"bpU;*to*8M@/#Cū4Մj0NߏъD8%lt#0~A-Uk9VX.'@U@>$+ѣeJp ZE#ێ󏞐Bk95WRf@1Zeort }3Ct7? 
var/home/core/zuul-output/logs/kubelet.log
Mar 20 15:37:47 crc systemd[1]: Starting Kubernetes Kubelet...
Mar 20 15:37:47 crc restorecon[4690]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Mar 20 15:37:47 crc restorecon[4690]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Mar 20 15:37:47 crc restorecon[4690]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 
15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:47 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc 
restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 
20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 
crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 
crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:48 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 
15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 15:37:49 crc 
restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 15:37:49 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 15:37:49 crc restorecon[4690]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 
15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 
15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc 
restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 15:37:50 crc restorecon[4690]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Mar 20 15:37:51 crc kubenswrapper[4813]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Mar 20 15:37:51 crc kubenswrapper[4813]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Mar 20 15:37:51 crc kubenswrapper[4813]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Mar 20 15:37:51 crc kubenswrapper[4813]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Mar 20 15:37:51 crc kubenswrapper[4813]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Mar 20 15:37:51 crc kubenswrapper[4813]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.006293 4813 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016600 4813 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016665 4813 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016680 4813 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016694 4813 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016704 4813 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016715 4813 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016725 4813 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016737 4813 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016748 4813 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016757 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016765 4813 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016773 4813 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016781 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016789 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016797 4813 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016804 4813 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016813 4813 feature_gate.go:330] unrecognized feature gate: Example Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016823 4813 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016834 4813 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016842 4813 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016850 4813 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016859 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016867 4813 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016876 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016885 4813 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016894 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016902 4813 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016922 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016931 4813 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016939 4813 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016947 4813 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016955 4813 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016962 4813 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016970 4813 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016978 4813 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016985 4813 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.016993 4813 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017000 4813 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017008 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017016 4813 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017024 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017031 4813 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017039 4813 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017048 4813 feature_gate.go:330] 
unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017056 4813 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017065 4813 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017072 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017080 4813 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017087 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017095 4813 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017102 4813 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017110 4813 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017120 4813 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017130 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017140 4813 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017149 4813 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017158 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017168 4813 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017176 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017184 4813 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017194 4813 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017202 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017210 4813 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017218 4813 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017226 4813 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017234 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017243 4813 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017251 4813 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017259 4813 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 15:37:51 crc 
kubenswrapper[4813]: W0320 15:37:51.017266 4813 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.017276 4813 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017531 4813 flags.go:64] FLAG: --address="0.0.0.0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017555 4813 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017573 4813 flags.go:64] FLAG: --anonymous-auth="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017585 4813 flags.go:64] FLAG: --application-metrics-count-limit="100" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017597 4813 flags.go:64] FLAG: --authentication-token-webhook="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017607 4813 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017620 4813 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017639 4813 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017652 4813 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017663 4813 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017676 4813 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017689 4813 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017701 4813 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017712 4813 flags.go:64] FLAG: --cgroup-root="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017723 4813 flags.go:64] FLAG: --cgroups-per-qos="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017735 4813 flags.go:64] FLAG: --client-ca-file="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017747 4813 flags.go:64] FLAG: --cloud-config="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017758 4813 flags.go:64] FLAG: --cloud-provider="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017769 4813 flags.go:64] FLAG: --cluster-dns="[]" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017784 4813 flags.go:64] FLAG: --cluster-domain="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017795 4813 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017808 4813 flags.go:64] FLAG: --config-dir="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017819 4813 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017835 4813 flags.go:64] FLAG: --container-log-max-files="5" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017861 4813 flags.go:64] FLAG: --container-log-max-size="10Mi" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017872 4813 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017885 4813 flags.go:64] FLAG: 
--containerd="/run/containerd/containerd.sock" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017897 4813 flags.go:64] FLAG: --containerd-namespace="k8s.io" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017909 4813 flags.go:64] FLAG: --contention-profiling="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017921 4813 flags.go:64] FLAG: --cpu-cfs-quota="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017932 4813 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017945 4813 flags.go:64] FLAG: --cpu-manager-policy="none" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017957 4813 flags.go:64] FLAG: --cpu-manager-policy-options="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017969 4813 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017980 4813 flags.go:64] FLAG: --enable-controller-attach-detach="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.017992 4813 flags.go:64] FLAG: --enable-debugging-handlers="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018004 4813 flags.go:64] FLAG: --enable-load-reader="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018015 4813 flags.go:64] FLAG: --enable-server="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018027 4813 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018044 4813 flags.go:64] FLAG: --event-burst="100" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018056 4813 flags.go:64] FLAG: --event-qps="50" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018068 4813 flags.go:64] FLAG: --event-storage-age-limit="default=0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018080 4813 flags.go:64] FLAG: --event-storage-event-limit="default=0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018093 4813 flags.go:64] FLAG: --eviction-hard="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018132 4813 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018145 4813 flags.go:64] FLAG: --eviction-minimum-reclaim="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018157 4813 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018171 4813 flags.go:64] FLAG: --eviction-soft="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018183 4813 flags.go:64] FLAG: --eviction-soft-grace-period="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018196 4813 flags.go:64] FLAG: --exit-on-lock-contention="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018208 4813 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018229 4813 flags.go:64] FLAG: --experimental-mounter-path="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018241 4813 flags.go:64] FLAG: --fail-cgroupv1="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018252 4813 flags.go:64] FLAG: --fail-swap-on="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018264 4813 flags.go:64] FLAG: --feature-gates="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018280 4813 flags.go:64] FLAG: --file-check-frequency="20s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018292 4813 flags.go:64] FLAG: 
--global-housekeeping-interval="1m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018305 4813 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018317 4813 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018329 4813 flags.go:64] FLAG: --healthz-port="10248" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018340 4813 flags.go:64] FLAG: --help="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018352 4813 flags.go:64] FLAG: --hostname-override="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018364 4813 flags.go:64] FLAG: --housekeeping-interval="10s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018375 4813 flags.go:64] FLAG: --http-check-frequency="20s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018384 4813 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018393 4813 flags.go:64] FLAG: --image-credential-provider-config="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018402 4813 flags.go:64] FLAG: --image-gc-high-threshold="85" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018412 4813 flags.go:64] FLAG: --image-gc-low-threshold="80" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018422 4813 flags.go:64] FLAG: --image-service-endpoint="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018433 4813 flags.go:64] FLAG: --kernel-memcg-notification="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018445 4813 flags.go:64] FLAG: --kube-api-burst="100" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018456 4813 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018469 4813 flags.go:64] FLAG: --kube-api-qps="50" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018515 4813 flags.go:64] FLAG: --kube-reserved="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018529 4813 flags.go:64] FLAG: --kube-reserved-cgroup="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018539 4813 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018551 4813 flags.go:64] FLAG: --kubelet-cgroups="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018563 4813 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018575 4813 flags.go:64] FLAG: --lock-file="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018596 4813 flags.go:64] FLAG: --log-cadvisor-usage="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018609 4813 flags.go:64] FLAG: --log-flush-frequency="5s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018623 4813 flags.go:64] FLAG: --log-json-info-buffer-size="0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018642 4813 flags.go:64] FLAG: --log-json-split-stream="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018655 4813 flags.go:64] FLAG: --log-text-info-buffer-size="0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018667 4813 flags.go:64] FLAG: --log-text-split-stream="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018679 4813 flags.go:64] FLAG: --logging-format="text" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018690 4813 flags.go:64] FLAG: 
--machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018703 4813 flags.go:64] FLAG: --make-iptables-util-chains="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018715 4813 flags.go:64] FLAG: --manifest-url="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018728 4813 flags.go:64] FLAG: --manifest-url-header="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018743 4813 flags.go:64] FLAG: --max-housekeeping-interval="15s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018755 4813 flags.go:64] FLAG: --max-open-files="1000000" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018770 4813 flags.go:64] FLAG: --max-pods="110" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018781 4813 flags.go:64] FLAG: --maximum-dead-containers="-1" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018794 4813 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018806 4813 flags.go:64] FLAG: --memory-manager-policy="None" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018817 4813 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018829 4813 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018841 4813 flags.go:64] FLAG: --node-ip="192.168.126.11" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018853 4813 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018881 4813 flags.go:64] FLAG: --node-status-max-images="50" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018892 4813 flags.go:64] FLAG: --node-status-update-frequency="10s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018904 4813 flags.go:64] FLAG: --oom-score-adj="-999" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018915 4813 flags.go:64] FLAG: --pod-cidr="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018926 4813 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018947 4813 flags.go:64] FLAG: --pod-manifest-path="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018958 4813 flags.go:64] FLAG: --pod-max-pids="-1" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018970 4813 flags.go:64] FLAG: --pods-per-core="0" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018982 4813 flags.go:64] FLAG: --port="10250" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.018997 4813 flags.go:64] FLAG: --protect-kernel-defaults="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019009 4813 flags.go:64] FLAG: --provider-id="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019022 4813 flags.go:64] FLAG: --qos-reserved="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019033 4813 flags.go:64] FLAG: --read-only-port="10255" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019044 4813 flags.go:64] FLAG: --register-node="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019056 4813 flags.go:64] FLAG: --register-schedulable="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019069 4813 flags.go:64] FLAG: 
--register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019090 4813 flags.go:64] FLAG: --registry-burst="10" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019102 4813 flags.go:64] FLAG: --registry-qps="5" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019113 4813 flags.go:64] FLAG: --reserved-cpus="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019125 4813 flags.go:64] FLAG: --reserved-memory="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019139 4813 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019151 4813 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019162 4813 flags.go:64] FLAG: --rotate-certificates="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019174 4813 flags.go:64] FLAG: --rotate-server-certificates="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019185 4813 flags.go:64] FLAG: --runonce="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019196 4813 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019208 4813 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019220 4813 flags.go:64] FLAG: --seccomp-default="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019231 4813 flags.go:64] FLAG: --serialize-image-pulls="true" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019242 4813 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019254 4813 flags.go:64] FLAG: --storage-driver-db="cadvisor" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019267 4813 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019280 4813 flags.go:64] FLAG: --storage-driver-password="root" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019292 4813 flags.go:64] FLAG: --storage-driver-secure="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019305 4813 flags.go:64] FLAG: --storage-driver-table="stats" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019318 4813 flags.go:64] FLAG: --storage-driver-user="root" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019329 4813 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019341 4813 flags.go:64] FLAG: --sync-frequency="1m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019353 4813 flags.go:64] FLAG: --system-cgroups="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019365 4813 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019384 4813 flags.go:64] FLAG: --system-reserved-cgroup="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019398 4813 flags.go:64] FLAG: --tls-cert-file="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019409 4813 flags.go:64] FLAG: --tls-cipher-suites="[]" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019426 4813 flags.go:64] FLAG: --tls-min-version="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019438 4813 flags.go:64] FLAG: --tls-private-key-file="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019449 4813 flags.go:64] FLAG: 
--topology-manager-policy="none" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019461 4813 flags.go:64] FLAG: --topology-manager-policy-options="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019474 4813 flags.go:64] FLAG: --topology-manager-scope="container" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019521 4813 flags.go:64] FLAG: --v="2" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019539 4813 flags.go:64] FLAG: --version="false" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019554 4813 flags.go:64] FLAG: --vmodule="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019571 4813 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.019583 4813 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019855 4813 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019866 4813 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019902 4813 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019912 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019920 4813 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019931 4813 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019942 4813 feature_gate.go:330] unrecognized feature gate: Example Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019953 4813 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019966 4813 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019981 4813 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.019995 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020007 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020018 4813 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020029 4813 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020037 4813 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020045 4813 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020053 4813 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020061 4813 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020071 4813 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020081 4813 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020092 4813 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020103 4813 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020114 4813 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020124 4813 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020134 4813 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020145 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020157 4813 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020168 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020177 4813 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020190 4813 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020201 4813 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020213 4813 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020225 4813 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020235 4813 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020248 4813 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020259 4813 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020268 4813 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020277 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020286 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020297 4813 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020308 4813 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020320 4813 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020330 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020340 4813 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020350 4813 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020360 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020370 4813 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020380 4813 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020390 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020399 4813 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020410 4813 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020420 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020431 4813 feature_gate.go:330] 
unrecognized feature gate: ChunkSizeMiB Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020441 4813 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020452 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020463 4813 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020473 4813 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020515 4813 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020527 4813 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020539 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020549 4813 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020560 4813 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020568 4813 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020576 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020584 4813 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020592 4813 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020600 4813 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020607 4813 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020615 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020623 4813 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.020653 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.020671 4813 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.034128 4813 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.034187 4813 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034316 4813 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034331 4813 feature_gate.go:330] unrecognized 
feature gate: ManagedBootImagesAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034340 4813 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034348 4813 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034357 4813 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034366 4813 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034374 4813 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034382 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034389 4813 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034400 4813 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034412 4813 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034422 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034432 4813 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034443 4813 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034452 4813 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034460 4813 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034468 4813 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034477 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034509 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034518 4813 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034526 4813 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034535 4813 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034543 4813 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034551 4813 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034559 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034569 4813 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034578 4813 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034587 4813 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034595 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034602 4813 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034610 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034618 4813 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034626 4813 feature_gate.go:330] unrecognized feature gate: Example Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034633 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034651 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034660 4813 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034668 4813 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034676 4813 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034683 4813 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034691 4813 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034699 4813 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034707 4813 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034714 4813 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034722 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034730 4813 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034737 4813 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034745 4813 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034753 4813 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034760 4813 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034768 4813 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034775 4813 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034783 4813 feature_gate.go:330] unrecognized feature gate: 
VSphereControlPlaneMachineSet Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034791 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034799 4813 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034807 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034814 4813 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034822 4813 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034830 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034838 4813 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034846 4813 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034854 4813 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034863 4813 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034870 4813 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034878 4813 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034885 4813 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034893 4813 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034901 4813 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034911 4813 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034921 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034931 4813 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.034941 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.034956 4813 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035182 4813 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035196 4813 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035204 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035213 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035225 4813 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035234 4813 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035243 4813 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035252 4813 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035259 4813 feature_gate.go:330] unrecognized feature gate: Example Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035269 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035277 4813 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035285 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035293 4813 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035300 4813 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035309 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035317 4813 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035325 4813 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035333 4813 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 15:37:51 crc 
kubenswrapper[4813]: W0320 15:37:51.035340 4813 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035348 4813 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035356 4813 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035367 4813 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035377 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035387 4813 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035394 4813 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035403 4813 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035411 4813 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035418 4813 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035426 4813 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035434 4813 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035442 4813 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035449 4813 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035457 4813 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035464 4813 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035473 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035506 4813 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035515 4813 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035523 4813 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035531 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035539 4813 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035547 4813 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035555 4813 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035563 4813 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035572 4813 feature_gate.go:330] unrecognized feature 
gate: DNSNameResolver Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035579 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035587 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035595 4813 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035603 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035610 4813 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035618 4813 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035626 4813 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035634 4813 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035642 4813 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035650 4813 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035660 4813 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035669 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035678 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035686 4813 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035694 4813 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035702 4813 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035710 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035717 4813 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035725 4813 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035733 4813 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035741 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035748 4813 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035756 4813 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035764 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035771 4813 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 15:37:51 crc kubenswrapper[4813]: 
W0320 15:37:51.035781 4813 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.035793 4813 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.035805 4813 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.036874 4813 server.go:940] "Client rotation is on, will bootstrap in background" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.041603 4813 bootstrap.go:266] "Unhandled Error" err="part of the existing bootstrap client certificate in /var/lib/kubelet/kubeconfig is expired: 2026-02-24 05:52:08 +0000 UTC" logger="UnhandledError" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.046121 4813 bootstrap.go:101] "Use the bootstrap credentials to request a cert, and set kubeconfig to point to the certificate dir" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.046285 4813 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.049249 4813 server.go:997] "Starting client certificate rotation" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.049301 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.049456 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.076075 4813 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.080523 4813 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.081118 4813 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.100572 4813 log.go:25] "Validated CRI v1 runtime API" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.135938 4813 log.go:25] "Validated CRI v1 image API" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.138772 4813 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.143723 4813 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-03-20-15-33-46-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 
15:37:51.143747 4813 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.166902 4813 manager.go:217] Machine: {Timestamp:2026-03-20 15:37:51.159209626 +0000 UTC m=+0.581912467 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9 BootID:5a09fee1-2bf6-40b4-b86c-857730ce0ad5 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:9b:c2:62 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:9b:c2:62 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:02:5c:b8 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:dc:d1:84 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:b1:e3:64 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:67:98:3d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:e6:a8:11:6e:a7:14 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9e:fb:54:99:36:f3 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} 
{Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.167190 4813 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.167342 4813 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.167837 4813 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.168073 4813 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.168114 4813 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.168355 4813 topology_manager.go:138] "Creating topology manager with none policy" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.168373 4813 container_manager_linux.go:303] "Creating device plugin manager" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.170526 4813 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.170566 4813 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.172897 4813 state_mem.go:36] "Initialized new in-memory state store" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.172982 4813 server.go:1245] "Using root directory" path="/var/lib/kubelet" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.177723 4813 kubelet.go:418] "Attempting to sync node with API server" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.177773 4813 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.177861 4813 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.177947 4813 kubelet.go:324] "Adding apiserver pod source" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.177985 4813 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.184096 4813 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.184858 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.185021 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.185456 4813 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.185619 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.185697 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.188574 4813 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193326 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193381 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193391 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193400 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193416 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193453 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193463 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193477 4813 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193501 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193509 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193540 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.193550 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.195850 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.196052 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.196687 4813 server.go:1280] "Started kubelet" Mar 20 15:37:51 crc systemd[1]: Started Kubernetes Kubelet. Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.199707 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.199928 4813 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.199855 4813 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.200279 4813 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.200777 4813 volume_manager.go:287] "The desired_state_of_world populator starts" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.200802 4813 volume_manager.go:289] "Starting Kubelet Volume Manager" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.200924 4813 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.201115 4813 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.215107 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.215753 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.215853 4813 factory.go:55] Registering systemd factory Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.215880 4813 factory.go:221] Registration of the systemd container factory successfully Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.215882 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:51 
crc kubenswrapper[4813]: I0320 15:37:51.216279 4813 factory.go:153] Registering CRI-O factory Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.216312 4813 factory.go:221] Registration of the crio container factory successfully Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.216413 4813 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.216448 4813 factory.go:103] Registering Raw factory Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.216282 4813 server.go:460] "Adding debug handlers to kubelet server" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.216602 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="200ms" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.216470 4813 manager.go:1196] Started watching for new ooms in manager Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.217426 4813 manager.go:319] Starting recovery of all containers Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.217626 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221343 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221380 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221391 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221402 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221410 4813 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221418 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221426 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221435 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221445 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221453 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221461 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221469 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221477 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221511 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221521 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221529 4813 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221538 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221546 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221555 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221563 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221571 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221578 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221588 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221596 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221604 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221612 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221623 4813 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221633 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221642 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221650 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221662 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221676 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221684 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221695 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221703 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221711 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221719 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221729 4813 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221739 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221763 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221776 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221786 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221798 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221809 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221825 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221834 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221843 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221885 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221908 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221922 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221933 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221945 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221966 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.221980 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222004 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222020 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222034 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222046 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222059 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222072 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222084 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222095 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222107 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222118 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222132 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222143 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222156 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222167 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222179 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222190 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222202 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222213 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222224 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222235 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222248 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222259 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222274 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222285 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222298 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222311 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222326 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222338 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222349 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222360 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222369 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222379 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222389 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222398 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222407 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222416 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222425 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222434 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222442 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222450 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222459 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222470 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222498 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222524 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222536 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222547 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222561 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222572 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222584 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222596 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222618 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222629 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222640 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222652 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222662 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222672 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222683 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.222694 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.227638 4813 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228087 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228281 4813 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228443 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228565 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228629 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228698 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228776 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228847 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.228903 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229056 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229125 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229201 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229283 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229346 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229429 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229519 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.229588 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230012 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230079 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230156 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230226 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230290 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230348 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230468 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230569 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230679 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230802 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230890 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.230967 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231028 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231107 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231168 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231269 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231403 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231471 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231704 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231825 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.231938 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.232411 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.232500 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.232561 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.232642 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.232701 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233151 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233173 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233184 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233195 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233205 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233215 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233225 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233235 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233245 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233255 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233264 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233277 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233287 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233296 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233306 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233316 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233327 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233336 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233371 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233380 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233390 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233400 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233410 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233419 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233430 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233440 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233451 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233461 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233472 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233512 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233527 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233537 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233546 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233555 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233565 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233575 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" 
volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233584 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233592 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233602 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233610 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233619 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233631 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233642 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233652 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233661 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233674 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233688 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233708 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233722 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233737 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233750 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233763 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233775 4813 reconstruct.go:97] "Volume reconstruction finished" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.233786 4813 reconciler.go:26] "Reconciler: start to sync state" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.239016 4813 manager.go:324] Recovery completed Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.246838 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.248085 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.248144 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.248157 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.248881 4813 cpu_manager.go:225] "Starting CPU manager" policy="none" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.248895 4813 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.248911 4813 state_mem.go:36] "Initialized new in-memory state store" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.263060 4813 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.264504 4813 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.264540 4813 status_manager.go:217] "Starting to sync pod status with apiserver" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.264565 4813 kubelet.go:2335] "Starting kubelet main sync loop" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.264608 4813 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.266188 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.266230 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.281070 4813 policy_none.go:49] "None policy: Start" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.281937 4813 memory_manager.go:170] "Starting memorymanager" policy="None" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.281973 4813 state_mem.go:35] "Initializing new in-memory state store" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.315317 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.345762 4813 manager.go:334] "Starting Device Plugin manager" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.345835 4813 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.345847 4813 server.go:79] "Starting device plugin registration server" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.346244 4813 eviction_manager.go:189] "Eviction manager: starting control loop" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.346263 4813 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.346398 4813 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.346519 4813 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.346543 4813 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.351939 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.364722 4813 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Mar 20 15:37:51 crc kubenswrapper[4813]: 
I0320 15:37:51.364818 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.366637 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.366701 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.366722 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.366843 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.367119 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.367170 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368169 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368198 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368172 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368223 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368231 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368209 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368330 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368684 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.368748 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.369050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.369131 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.369144 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.369418 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.369614 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.369885 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370019 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370038 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370049 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370288 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370308 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370430 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370590 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370665 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370942 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370960 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370969 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.370988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371001 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371015 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371336 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371356 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371412 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.371415 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.372244 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.372286 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.372295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.417501 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="400ms" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436732 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436772 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436797 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436825 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436893 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436930 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436951 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.436977 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437001 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437022 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437041 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437060 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437079 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437099 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.437117 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.446604 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.447583 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.447630 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.447643 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.447695 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.448156 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538223 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538297 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538338 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538376 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538421 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538457 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538442 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538509 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538529 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538545 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538456 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538565 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538579 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538578 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538589 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538618 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538654 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538661 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538678 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538709 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538765 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538790 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538814 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538858 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538888 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538888 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538918 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538921 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.538962 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.539064 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.648926 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.650411 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.650521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.650541 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.650588 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.651141 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.706907 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.709073 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.717969 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.733038 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: I0320 15:37:51.737654 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.810556 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-229412a1e0281125eb6d23f3719640e03c1dc867c48d2fb8eef7e056d3cefb11 WatchSource:0}: Error finding container 229412a1e0281125eb6d23f3719640e03c1dc867c48d2fb8eef7e056d3cefb11: Status 404 returned error can't find the container with id 229412a1e0281125eb6d23f3719640e03c1dc867c48d2fb8eef7e056d3cefb11 Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.813144 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-2bcb251c4b49d443b87e33324817fc983124da70f9c41b6d9e7fd0eff92397dd WatchSource:0}: Error finding container 2bcb251c4b49d443b87e33324817fc983124da70f9c41b6d9e7fd0eff92397dd: Status 404 returned error can't find the container with id 2bcb251c4b49d443b87e33324817fc983124da70f9c41b6d9e7fd0eff92397dd Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.815178 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-40b82f0c1de76bc51f56d6fdbc03450769bbbde84ee7cc033778189db759d7fe WatchSource:0}: Error finding container 40b82f0c1de76bc51f56d6fdbc03450769bbbde84ee7cc033778189db759d7fe: Status 404 returned error can't find the container with id 40b82f0c1de76bc51f56d6fdbc03450769bbbde84ee7cc033778189db759d7fe Mar 20 15:37:51 crc kubenswrapper[4813]: E0320 15:37:51.818823 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="800ms" Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.821471 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-bda6f5f266992b1ffeefc88e245fe6628f1d1ee91870d8b145e5ac3fdd6a6727 WatchSource:0}: Error finding container bda6f5f266992b1ffeefc88e245fe6628f1d1ee91870d8b145e5ac3fdd6a6727: Status 404 returned error can't find the container with id bda6f5f266992b1ffeefc88e245fe6628f1d1ee91870d8b145e5ac3fdd6a6727 Mar 20 15:37:51 crc kubenswrapper[4813]: W0320 15:37:51.824194 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-195695468e2a8a0e88d7f2023e0bc2fac21d2c7db755742d19b50f6d723b62a7 WatchSource:0}: Error finding container 195695468e2a8a0e88d7f2023e0bc2fac21d2c7db755742d19b50f6d723b62a7: Status 404 returned error can't find the container with id 195695468e2a8a0e88d7f2023e0bc2fac21d2c7db755742d19b50f6d723b62a7 Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.051557 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.053101 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.053134 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.053142 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.053165 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.053805 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.197171 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.270069 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"229412a1e0281125eb6d23f3719640e03c1dc867c48d2fb8eef7e056d3cefb11"} Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.271133 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"195695468e2a8a0e88d7f2023e0bc2fac21d2c7db755742d19b50f6d723b62a7"} Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.272216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bda6f5f266992b1ffeefc88e245fe6628f1d1ee91870d8b145e5ac3fdd6a6727"} Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.273366 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"40b82f0c1de76bc51f56d6fdbc03450769bbbde84ee7cc033778189db759d7fe"} Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.274473 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2bcb251c4b49d443b87e33324817fc983124da70f9c41b6d9e7fd0eff92397dd"} Mar 20 15:37:52 crc kubenswrapper[4813]: W0320 15:37:52.540898 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.540987 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.620211 4813 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="1.6s" Mar 20 15:37:52 crc kubenswrapper[4813]: W0320 15:37:52.653143 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.653279 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:52 crc kubenswrapper[4813]: W0320 15:37:52.702417 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.702525 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:52 crc kubenswrapper[4813]: W0320 15:37:52.744685 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.744811 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.854118 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.855565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.855609 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.855621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:52 crc kubenswrapper[4813]: I0320 15:37:52.855652 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:37:52 crc kubenswrapper[4813]: E0320 15:37:52.856107 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Mar 20 
15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.166343 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 15:37:53 crc kubenswrapper[4813]: E0320 15:37:53.167796 4813 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.197818 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.280424 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943"} Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.280532 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.283022 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.283061 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3"} Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.283104 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.283129 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.283230 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.284801 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.284854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.284879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.285408 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"86ccefb1bcc2d04371d91a518875328c145139e697219054fb3e9afb0f30716c"} Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.288098 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a"} Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.288153 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.289620 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.289665 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.289681 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.290009 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45"} Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.290144 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.291363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.291410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:53 crc kubenswrapper[4813]: I0320 15:37:53.291428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.197541 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:54 crc kubenswrapper[4813]: E0320 15:37:54.221449 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="3.2s" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.294845 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3" exitCode=0 Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.294947 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3"} Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.295068 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.296312 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.296368 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:54 crc 
kubenswrapper[4813]: I0320 15:37:54.296386 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.296987 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a9511d1d58aa16081ae5deb6c8739f1dba7f6bb0f9dfdbf4ba666c26e9d71e47"} Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.298426 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.299038 4813 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a" exitCode=0 Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.299109 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a"} Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.299162 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.299818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.299856 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.299868 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.300108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.300162 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.300178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.301563 4813 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45" exitCode=0 Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.301678 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45"} Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.301742 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.305641 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.305671 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943"} Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.305689 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.305640 4813 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943" exitCode=0 Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.305693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.305808 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.308872 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.308918 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.308933 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.457089 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.458727 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.458780 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.458796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:54 crc kubenswrapper[4813]: I0320 15:37:54.458832 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:37:54 crc kubenswrapper[4813]: E0320 15:37:54.459279 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Mar 20 15:37:54 crc kubenswrapper[4813]: W0320 15:37:54.520159 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:54 crc kubenswrapper[4813]: E0320 15:37:54.520255 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:54 crc kubenswrapper[4813]: W0320 15:37:54.961954 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: 
connection refused Mar 20 15:37:54 crc kubenswrapper[4813]: E0320 15:37:54.962047 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.196926 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.309012 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.309105 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.309914 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.309942 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.309952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.312281 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.312310 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.312323 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.317802 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"70aa29b0ae23f70f78b4c8c6923ab47903541d022d5f008051f2faa51b484018"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.318354 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c5c590d909352798fc16de9dea6223ff5fa5299909451ff7a7c59e2a208a6dc4"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.318224 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:55 crc 
kubenswrapper[4813]: I0320 15:37:55.320018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.320053 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.320063 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.321377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.321425 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.321436 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.321511 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.322497 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.322527 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.322539 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.324394 4813 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b" exitCode=0 Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.324500 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.324591 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b"} Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.325208 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.325251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:55 crc kubenswrapper[4813]: I0320 15:37:55.325268 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:55 crc kubenswrapper[4813]: W0320 15:37:55.406538 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get 
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:55 crc kubenswrapper[4813]: E0320 15:37:55.406610 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:55 crc kubenswrapper[4813]: E0320 15:37:55.552213 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:37:55 crc kubenswrapper[4813]: W0320 15:37:55.552398 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:55 crc kubenswrapper[4813]: E0320 15:37:55.552461 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.196723 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.330688 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c0b40daaf4d70650e7c8e426483a2b96b7bbf64b7d52a09e318d130943af1d00"} Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.330760 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b"} Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.330913 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.332075 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.332121 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.332138 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.335132 4813 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20" exitCode=0 Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.335240 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.335932 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.336521 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20"} Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.336632 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.337199 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.337262 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338866 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338899 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338908 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.339021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.339046 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.339061 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.338866 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.513830 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:56 crc kubenswrapper[4813]: I0320 15:37:56.792417 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.274541 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.340635 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1"} Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.340691 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173"} Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.340709 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49"} Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.340720 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d"} Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.340762 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.340822 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.341564 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.342071 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.342087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.342094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.342792 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.342810 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.342817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.659643 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.661447 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.661555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.661584 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.661618 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:37:57 crc kubenswrapper[4813]: I0320 15:37:57.923858 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.349545 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306"} Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.349650 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.349698 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.349705 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.351298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.351353 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.351301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.351373 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.351403 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.351524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.505966 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.807291 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.807569 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.809099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.809156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.809180 4813 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.917241 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.917411 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.919270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.919342 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:58 crc kubenswrapper[4813]: I0320 15:37:58.919380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.215908 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.353727 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.353761 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.353746 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.353932 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356152 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356235 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356200 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356308 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356287 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:37:59 crc kubenswrapper[4813]: I0320 15:37:59.356498 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:00 crc kubenswrapper[4813]: I0320 15:38:00.357703 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:00 crc kubenswrapper[4813]: I0320 15:38:00.359168 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:00 crc kubenswrapper[4813]: I0320 15:38:00.359232 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:00 crc kubenswrapper[4813]: I0320 15:38:00.359248 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.305681 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.305854 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.306871 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.306896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.306907 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:01 crc kubenswrapper[4813]: E0320 15:38:01.352049 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.717532 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.717786 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.719247 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.719276 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.719285 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.727187 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.814883 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.815599 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.817752 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.817820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.817843 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.917998 4813 patch_prober.go:28] interesting 
pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:38:01 crc kubenswrapper[4813]: I0320 15:38:01.918095 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:38:02 crc kubenswrapper[4813]: I0320 15:38:02.362437 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:02 crc kubenswrapper[4813]: I0320 15:38:02.363591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:02 crc kubenswrapper[4813]: I0320 15:38:02.363653 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:02 crc kubenswrapper[4813]: I0320 15:38:02.363669 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.268957 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51138->192.168.126.11:17697: read: connection reset by peer" start-of-body= Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.269341 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51138->192.168.126.11:17697: read: connection reset by peer" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.376113 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.378088 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c0b40daaf4d70650e7c8e426483a2b96b7bbf64b7d52a09e318d130943af1d00" exitCode=255 Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.378145 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c0b40daaf4d70650e7c8e426483a2b96b7bbf64b7d52a09e318d130943af1d00"} Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.378306 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.379352 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.379373 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.379382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.380002 4813 scope.go:117] "RemoveContainer" containerID="c0b40daaf4d70650e7c8e426483a2b96b7bbf64b7d52a09e318d130943af1d00" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.381411 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.381470 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Mar 20 15:38:06 crc kubenswrapper[4813]: W0320 15:38:06.381838 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.381987 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.383901 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 15:38:06 crc kubenswrapper[4813]: W0320 15:38:06.386280 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.386357 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:06 crc kubenswrapper[4813]: W0320 15:38:06.386659 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.386789 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:06 crc kubenswrapper[4813]: W0320 15:38:06.387033 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.387124 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.387451 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.390043 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" interval="6.4s" Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.392381 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:06 crc kubenswrapper[4813]: E0320 15:38:06.393054 4813 certificate_manager.go:562] "Unhandled 
Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:06Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.396742 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.396786 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.519496 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]log ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]etcd ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-apiserver-admission-initializer ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-api-request-count-filter ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-startkubeinformers ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/generic-apiserver-start-informers ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/priority-and-fairness-config-consumer ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/priority-and-fairness-filter ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/storage-object-count-tracker-hook ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-apiextensions-informers ok Mar 20 15:38:06 crc kubenswrapper[4813]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld Mar 20 15:38:06 crc kubenswrapper[4813]: [-]poststarthook/crd-informer-synced failed: reason withheld Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-system-namespaces-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-cluster-authentication-info-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-legacy-token-tracking-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-service-ip-repair-controllers ok Mar 20 15:38:06 crc kubenswrapper[4813]: 
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld Mar 20 15:38:06 crc kubenswrapper[4813]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/priority-and-fairness-config-producer ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/bootstrap-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/start-kube-aggregator-informers ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-status-local-available-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-status-remote-available-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-registration-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-wait-for-first-sync ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-discovery-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/kube-apiserver-autoregistration ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]autoregister-completion ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-openapi-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: [+]poststarthook/apiservice-openapiv3-controller ok Mar 20 15:38:06 crc kubenswrapper[4813]: livez check failed Mar 20 15:38:06 crc kubenswrapper[4813]: I0320 15:38:06.519559 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.200956 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:07Z is after 2026-02-23T05:33:13Z Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.383504 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.385844 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f"} Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.386014 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.387056 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.387095 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:07 crc kubenswrapper[4813]: I0320 15:38:07.387107 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.200939 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode 
publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:08Z is after 2026-02-23T05:33:13Z Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.390063 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.390688 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.392357 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" exitCode=255 Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.392398 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f"} Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.392443 4813 scope.go:117] "RemoveContainer" containerID="c0b40daaf4d70650e7c8e426483a2b96b7bbf64b7d52a09e318d130943af1d00" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.392641 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.393775 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.393820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.393837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:08 crc kubenswrapper[4813]: I0320 15:38:08.394646 4813 scope.go:117] "RemoveContainer" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:08 crc kubenswrapper[4813]: E0320 15:38:08.394908 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.200869 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:09Z is after 2026-02-23T05:33:13Z Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.221926 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.222101 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.223505 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.223549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.223565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:09 crc kubenswrapper[4813]: I0320 15:38:09.430633 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Mar 20 15:38:10 crc kubenswrapper[4813]: I0320 15:38:10.200391 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:10Z is after 2026-02-23T05:33:13Z Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.200884 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:11Z is after 2026-02-23T05:33:13Z Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.305802 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.306059 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.307649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.307682 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.307694 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.308231 4813 scope.go:117] "RemoveContainer" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:11 crc kubenswrapper[4813]: E0320 15:38:11.308406 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:11 crc kubenswrapper[4813]: E0320 15:38:11.352197 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.522160 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.522421 4813 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.524044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.524096 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.524114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.525017 4813 scope.go:117] "RemoveContainer" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:11 crc kubenswrapper[4813]: E0320 15:38:11.525316 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.529100 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.857311 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.857586 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.858901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.858972 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.858996 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.875120 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.918214 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:38:11 crc kubenswrapper[4813]: I0320 15:38:11.918392 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.201901 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:12Z is after 2026-02-23T05:33:13Z Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.440056 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.440362 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.441080 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.441155 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.441182 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.441963 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.442025 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.442038 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.442545 4813 scope.go:117] "RemoveContainer" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:12 crc kubenswrapper[4813]: E0320 15:38:12.442824 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.784943 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.787010 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.787064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.787082 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:12 crc kubenswrapper[4813]: I0320 15:38:12.787113 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:12 crc kubenswrapper[4813]: E0320 15:38:12.792411 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:12Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 15:38:12 crc kubenswrapper[4813]: E0320 15:38:12.796028 4813 controller.go:145] "Failed to ensure lease exists, will retry" 
err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:12Z is after 2026-02-23T05:33:13Z" interval="7s" Mar 20 15:38:13 crc kubenswrapper[4813]: I0320 15:38:13.202786 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:13Z is after 2026-02-23T05:33:13Z Mar 20 15:38:13 crc kubenswrapper[4813]: W0320 15:38:13.998163 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:13Z is after 2026-02-23T05:33:13Z Mar 20 15:38:13 crc kubenswrapper[4813]: E0320 15:38:13.998259 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:13Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:14 crc kubenswrapper[4813]: I0320 15:38:14.201336 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:14Z is after 2026-02-23T05:33:13Z Mar 20 15:38:14 crc kubenswrapper[4813]: I0320 15:38:14.973330 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 15:38:14 crc kubenswrapper[4813]: E0320 15:38:14.977824 4813 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:14Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.201763 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:15Z is after 2026-02-23T05:33:13Z Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.217446 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.217973 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.219558 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.219784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.219953 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:15 crc kubenswrapper[4813]: I0320 15:38:15.220892 4813 scope.go:117] "RemoveContainer" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:15 crc kubenswrapper[4813]: E0320 15:38:15.221347 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:15 crc kubenswrapper[4813]: W0320 15:38:15.461987 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:15Z is after 2026-02-23T05:33:13Z Mar 20 15:38:15 crc kubenswrapper[4813]: E0320 15:38:15.462081 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:15Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:15 crc kubenswrapper[4813]: W0320 15:38:15.536932 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:15Z is after 2026-02-23T05:33:13Z Mar 20 15:38:15 crc kubenswrapper[4813]: E0320 15:38:15.537029 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:15Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:16 crc kubenswrapper[4813]: I0320 15:38:16.202293 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:16Z is after 2026-02-23T05:33:13Z Mar 20 15:38:16 crc kubenswrapper[4813]: E0320 15:38:16.397248 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:16Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:16 crc kubenswrapper[4813]: W0320 15:38:16.522858 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:16Z is after 2026-02-23T05:33:13Z Mar 20 15:38:16 crc kubenswrapper[4813]: E0320 15:38:16.523256 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:16Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:17 crc kubenswrapper[4813]: I0320 15:38:17.199959 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:17Z is after 2026-02-23T05:33:13Z Mar 20 15:38:18 crc kubenswrapper[4813]: I0320 15:38:18.201178 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:18Z is after 2026-02-23T05:33:13Z Mar 20 15:38:19 crc kubenswrapper[4813]: I0320 15:38:19.201539 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:19Z is after 2026-02-23T05:33:13Z Mar 20 15:38:19 crc kubenswrapper[4813]: I0320 15:38:19.793460 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:19 crc kubenswrapper[4813]: I0320 15:38:19.794879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:19 crc kubenswrapper[4813]: I0320 15:38:19.794990 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:19 crc kubenswrapper[4813]: I0320 15:38:19.795062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:19 crc kubenswrapper[4813]: I0320 
15:38:19.795130 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:19 crc kubenswrapper[4813]: E0320 15:38:19.798490 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:19Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 15:38:19 crc kubenswrapper[4813]: E0320 15:38:19.801065 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:19Z is after 2026-02-23T05:33:13Z" interval="7s" Mar 20 15:38:20 crc kubenswrapper[4813]: I0320 15:38:20.200243 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:20Z is after 2026-02-23T05:33:13Z Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.201588 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:21Z is after 2026-02-23T05:33:13Z Mar 20 15:38:21 crc kubenswrapper[4813]: E0320 15:38:21.352697 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.918239 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.918317 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.918380 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.918604 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.919798 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.919855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.919879 4813 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.920660 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cluster-policy-controller" containerStatusID={"Type":"cri-o","ID":"a9511d1d58aa16081ae5deb6c8739f1dba7f6bb0f9dfdbf4ba666c26e9d71e47"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container cluster-policy-controller failed startup probe, will be restarted" Mar 20 15:38:21 crc kubenswrapper[4813]: I0320 15:38:21.920895 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" containerID="cri-o://a9511d1d58aa16081ae5deb6c8739f1dba7f6bb0f9dfdbf4ba666c26e9d71e47" gracePeriod=30 Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.201147 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:22Z is after 2026-02-23T05:33:13Z Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.504910 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.505626 4813 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="a9511d1d58aa16081ae5deb6c8739f1dba7f6bb0f9dfdbf4ba666c26e9d71e47" exitCode=255 Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.505687 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"a9511d1d58aa16081ae5deb6c8739f1dba7f6bb0f9dfdbf4ba666c26e9d71e47"} Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.505727 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d2c6b6be5a3c802947f83131f64a56b0438a1a15b908524c54cee55759b168ce"} Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.505872 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.507062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.507109 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:22 crc kubenswrapper[4813]: I0320 15:38:22.507126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:23 crc kubenswrapper[4813]: I0320 15:38:23.199138 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:23Z is after 2026-02-23T05:33:13Z Mar 20 
15:38:24 crc kubenswrapper[4813]: I0320 15:38:24.200007 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:24Z is after 2026-02-23T05:33:13Z Mar 20 15:38:25 crc kubenswrapper[4813]: I0320 15:38:25.201145 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:25Z is after 2026-02-23T05:33:13Z Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.200714 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:26Z is after 2026-02-23T05:33:13Z Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.265715 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.266976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.267021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.267036 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.267621 4813 scope.go:117] "RemoveContainer" containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:26 crc kubenswrapper[4813]: E0320 15:38:26.400431 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:26Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.793305 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.793442 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.794769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 
15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.794855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.794870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.799449 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.800351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.800395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.800410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:26 crc kubenswrapper[4813]: I0320 15:38:26.800434 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:26 crc kubenswrapper[4813]: E0320 15:38:26.803535 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:26Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 15:38:26 crc kubenswrapper[4813]: E0320 15:38:26.805676 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:26Z is after 2026-02-23T05:33:13Z" interval="7s" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.199036 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:27Z is after 2026-02-23T05:33:13Z Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.522784 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.523897 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.529665 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad" exitCode=255 Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.529705 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad"} Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.529771 4813 scope.go:117] "RemoveContainer" 
containerID="f5cd2aaaec8842b2ee0018e5aa2c9c0d8faf338f6a19d79eeedbe908fd186a8f" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.529861 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.530813 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.530849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.530860 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:27 crc kubenswrapper[4813]: I0320 15:38:27.531342 4813 scope.go:117] "RemoveContainer" containerID="4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad" Mar 20 15:38:27 crc kubenswrapper[4813]: E0320 15:38:27.531561 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.200102 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:28Z is after 2026-02-23T05:33:13Z Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.537793 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.917569 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.917822 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.919264 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.919338 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:28 crc kubenswrapper[4813]: I0320 15:38:28.919363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:29 crc kubenswrapper[4813]: I0320 15:38:29.199569 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:29Z is after 2026-02-23T05:33:13Z Mar 20 15:38:30 crc kubenswrapper[4813]: I0320 15:38:30.201651 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:30Z is after 2026-02-23T05:33:13Z Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.017818 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 15:38:31 crc kubenswrapper[4813]: E0320 15:38:31.025183 4813 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:31Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:31 crc kubenswrapper[4813]: E0320 15:38:31.026783 4813 certificate_manager.go:440] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Reached backoff limit, still unable to rotate certs: timed out waiting for the condition" logger="UnhandledError" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.204099 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:31Z is after 2026-02-23T05:33:13Z Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.305746 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.305968 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.307462 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.307537 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.307554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.308464 4813 scope.go:117] "RemoveContainer" containerID="4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad" Mar 20 15:38:31 crc kubenswrapper[4813]: E0320 15:38:31.308785 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:31 crc kubenswrapper[4813]: E0320 15:38:31.353286 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.918142 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup 
probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:38:31 crc kubenswrapper[4813]: I0320 15:38:31.918312 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:38:32 crc kubenswrapper[4813]: I0320 15:38:32.199553 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:32Z is after 2026-02-23T05:33:13Z Mar 20 15:38:32 crc kubenswrapper[4813]: W0320 15:38:32.460423 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:32Z is after 2026-02-23T05:33:13Z Mar 20 15:38:32 crc kubenswrapper[4813]: E0320 15:38:32.460570 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:32Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:33 crc kubenswrapper[4813]: I0320 15:38:33.202107 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:33Z is after 2026-02-23T05:33:13Z Mar 20 15:38:33 crc kubenswrapper[4813]: I0320 15:38:33.804505 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:33 crc kubenswrapper[4813]: I0320 15:38:33.806217 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:33 crc kubenswrapper[4813]: I0320 15:38:33.806257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:33 crc kubenswrapper[4813]: I0320 15:38:33.806272 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:33 crc kubenswrapper[4813]: I0320 15:38:33.806300 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:33 crc kubenswrapper[4813]: E0320 15:38:33.809197 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:33Z is 
after 2026-02-23T05:33:13Z" interval="7s" Mar 20 15:38:33 crc kubenswrapper[4813]: E0320 15:38:33.809811 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:33Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 15:38:34 crc kubenswrapper[4813]: I0320 15:38:34.201136 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:34Z is after 2026-02-23T05:33:13Z Mar 20 15:38:34 crc kubenswrapper[4813]: W0320 15:38:34.942659 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:34Z is after 2026-02-23T05:33:13Z Mar 20 15:38:34 crc kubenswrapper[4813]: E0320 15:38:34.942766 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:34Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.200420 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:35Z is after 2026-02-23T05:33:13Z Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.218038 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.218366 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.220067 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.220134 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.220149 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:35 crc kubenswrapper[4813]: I0320 15:38:35.220994 4813 scope.go:117] "RemoveContainer" containerID="4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad" Mar 20 15:38:35 crc kubenswrapper[4813]: E0320 15:38:35.221341 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:36 crc kubenswrapper[4813]: I0320 15:38:36.201349 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:36Z is after 2026-02-23T05:33:13Z Mar 20 15:38:36 crc kubenswrapper[4813]: E0320 15:38:36.405638 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:36Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:36 crc kubenswrapper[4813]: W0320 15:38:36.974101 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:36Z is after 2026-02-23T05:33:13Z Mar 20 15:38:36 crc kubenswrapper[4813]: E0320 15:38:36.974199 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:36Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:37 crc kubenswrapper[4813]: I0320 15:38:37.201709 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:37Z is after 2026-02-23T05:33:13Z Mar 20 15:38:37 crc kubenswrapper[4813]: W0320 15:38:37.326951 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:37Z is after 2026-02-23T05:33:13Z Mar 20 15:38:37 crc kubenswrapper[4813]: E0320 15:38:37.327130 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-03-20T15:38:37Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 15:38:38 crc kubenswrapper[4813]: I0320 15:38:38.200334 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:38Z is after 2026-02-23T05:33:13Z Mar 20 15:38:39 crc kubenswrapper[4813]: I0320 15:38:39.202178 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:39Z is after 2026-02-23T05:33:13Z Mar 20 15:38:40 crc kubenswrapper[4813]: I0320 15:38:40.200174 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:40Z is after 2026-02-23T05:33:13Z Mar 20 15:38:40 crc kubenswrapper[4813]: I0320 15:38:40.810171 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:40 crc kubenswrapper[4813]: I0320 15:38:40.811685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:40 crc kubenswrapper[4813]: I0320 15:38:40.811761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:40 crc kubenswrapper[4813]: I0320 15:38:40.811784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:40 crc kubenswrapper[4813]: I0320 15:38:40.811833 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:40 crc kubenswrapper[4813]: E0320 15:38:40.815275 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:40Z is after 2026-02-23T05:33:13Z" interval="7s" Mar 20 15:38:40 crc kubenswrapper[4813]: E0320 15:38:40.818542 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:40Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 15:38:41 crc kubenswrapper[4813]: I0320 15:38:41.200034 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:41Z is after 2026-02-23T05:33:13Z Mar 20 15:38:41 crc kubenswrapper[4813]: E0320 15:38:41.354551 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:38:41 crc 
kubenswrapper[4813]: I0320 15:38:41.918841 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:38:41 crc kubenswrapper[4813]: I0320 15:38:41.919573 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:38:42 crc kubenswrapper[4813]: I0320 15:38:42.200835 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:42Z is after 2026-02-23T05:33:13Z Mar 20 15:38:43 crc kubenswrapper[4813]: I0320 15:38:43.199415 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:43Z is after 2026-02-23T05:33:13Z Mar 20 15:38:44 crc kubenswrapper[4813]: I0320 15:38:44.201128 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:38:44Z is after 2026-02-23T05:33:13Z Mar 20 15:38:45 crc kubenswrapper[4813]: I0320 15:38:45.202069 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:46 crc kubenswrapper[4813]: I0320 15:38:46.202443 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.411915 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dcbc1a9e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,LastTimestamp:2026-03-20 15:37:51.196658334 +0000 UTC m=+0.619361175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 
15:38:46.417315 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.422341 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.427841 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.434217 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3e5ccd413 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeAllocatableEnforced,Message:Updated Node Allocatable limit across pods,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.348749331 +0000 UTC m=+0.771452172,LastTimestamp:2026-03-20 15:37:51.348749331 +0000 UTC m=+0.771452172,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc 
kubenswrapper[4813]: E0320 15:38:46.441245 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.366666898 +0000 UTC m=+0.789369759,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.445433 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.366714859 +0000 UTC m=+0.789417700,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.450596 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfce017b\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.36672701 +0000 UTC m=+0.789429851,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.459504 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.368191919 +0000 UTC m=+0.790894760,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 
+0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.463251 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.36820516 +0000 UTC m=+0.790908001,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.469413 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.36821687 +0000 UTC m=+0.790919701,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.472927 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.36822822 +0000 UTC m=+0.790931061,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.477017 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfce017b\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC 
m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.368237201 +0000 UTC m=+0.790940042,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.483121 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfce017b\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.368288982 +0000 UTC m=+0.790991833,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.489043 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.369076643 +0000 UTC m=+0.791779484,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.494274 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.369140755 +0000 UTC m=+0.791843596,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.500332 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfce017b\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: 
NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.369149355 +0000 UTC m=+0.791852196,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.505937 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.370031739 +0000 UTC m=+0.792734570,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.510857 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.37004477 +0000 UTC m=+0.792747611,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.514726 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfce017b\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.37005457 +0000 UTC m=+0.792757411,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.518553 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.370297987 +0000 UTC m=+0.793000828,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.524558 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.370313387 +0000 UTC m=+0.793016218,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.530931 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfce017b\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfce017b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248163195 +0000 UTC m=+0.670866036,LastTimestamp:2026-03-20 15:37:51.370321217 +0000 UTC m=+0.793024058,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.535896 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcd8128\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcd8128 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248130344 +0000 UTC m=+0.670833185,LastTimestamp:2026-03-20 15:37:51.370954354 +0000 UTC m=+0.793657195,Count:8,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.541131 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e96c3dfcdd0ed\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace 
\"default\"" event="&Event{ObjectMeta:{crc.189e96c3dfcdd0ed default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.248150765 +0000 UTC m=+0.670853606,LastTimestamp:2026-03-20 15:37:51.370965695 +0000 UTC m=+0.793668536,Count:8,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.547570 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c401ef5cc6 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.820774598 +0000 UTC m=+1.243477469,LastTimestamp:2026-03-20 15:37:51.820774598 +0000 UTC m=+1.243477469,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.552667 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c401f05862 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.82083901 +0000 UTC m=+1.243541851,LastTimestamp:2026-03-20 15:37:51.82083901 +0000 UTC m=+1.243541851,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.558443 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e96c401f7c889 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.821326473 +0000 UTC m=+1.244029344,LastTimestamp:2026-03-20 15:37:51.821326473 +0000 UTC m=+1.244029344,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.564156 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4022143c5 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.824044997 +0000 UTC m=+1.246747878,LastTimestamp:2026-03-20 15:37:51.824044997 +0000 UTC m=+1.246747878,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.570077 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c402589e1e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:51.827672606 +0000 UTC m=+1.250375487,LastTimestamp:2026-03-20 15:37:51.827672606 +0000 UTC m=+1.250375487,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.575004 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c451500d3f openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Created,Message:Created container kube-controller-manager,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.152511295 +0000 UTC m=+2.575214136,LastTimestamp:2026-03-20 15:37:53.152511295 +0000 UTC m=+2.575214136,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.580049 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e96c451511fbf openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.152581567 +0000 UTC m=+2.575284418,LastTimestamp:2026-03-20 15:37:53.152581567 +0000 UTC m=+2.575284418,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.584584 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4515a18af openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Created,Message:Created container wait-for-host-port,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.153169583 +0000 UTC m=+2.575872434,LastTimestamp:2026-03-20 15:37:53.153169583 +0000 UTC m=+2.575872434,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.589423 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c4515acb48 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.153215304 +0000 UTC m=+2.575918145,LastTimestamp:2026-03-20 15:37:53.153215304 +0000 UTC 
m=+2.575918145,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.594857 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4518d15da openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.156511194 +0000 UTC m=+2.579214035,LastTimestamp:2026-03-20 15:37:53.156511194 +0000 UTC m=+2.579214035,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.599833 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c451d0d875 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Started,Message:Started container kube-controller-manager,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.160951925 +0000 UTC m=+2.583654776,LastTimestamp:2026-03-20 15:37:53.160951925 +0000 UTC m=+2.583654776,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.605170 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c451e4ff27 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.162272551 +0000 UTC m=+2.584975402,LastTimestamp:2026-03-20 15:37:53.162272551 +0000 UTC m=+2.584975402,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.609799 4813 event.go:359] "Server rejected event (will not 
retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e96c451e59474 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.162310772 +0000 UTC m=+2.585013613,LastTimestamp:2026-03-20 15:37:53.162310772 +0000 UTC m=+2.585013613,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.615584 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c45224b3ea openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Started,Message:Started container wait-for-host-port,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.166447594 +0000 UTC m=+2.589150435,LastTimestamp:2026-03-20 15:37:53.166447594 +0000 UTC m=+2.589150435,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.620286 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c45275443a openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.171727418 +0000 UTC m=+2.594430259,LastTimestamp:2026-03-20 15:37:53.171727418 +0000 UTC m=+2.594430259,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.626641 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4528811a3 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.172959651 +0000 UTC m=+2.595662492,LastTimestamp:2026-03-20 15:37:53.172959651 +0000 UTC m=+2.595662492,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.633411 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c47e249697 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Created,Message:Created container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.904637591 +0000 UTC m=+3.327340452,LastTimestamp:2026-03-20 15:37:53.904637591 +0000 UTC m=+3.327340452,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.640212 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c495996b29 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.298170153 +0000 UTC m=+3.720873024,LastTimestamp:2026-03-20 15:37:54.298170153 +0000 UTC m=+3.720873024,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.647317 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c495d837d9 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Pulled,Message:Container image 
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.302285785 +0000 UTC m=+3.724988626,LastTimestamp:2026-03-20 15:37:54.302285785 +0000 UTC m=+3.724988626,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.653843 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c496534f68 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.310352744 +0000 UTC m=+3.733055595,LastTimestamp:2026-03-20 15:37:54.310352744 +0000 UTC m=+3.733055595,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.657959 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e96c49653d2ef openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.310386415 +0000 UTC m=+3.733089286,LastTimestamp:2026-03-20 15:37:54.310386415 +0000 UTC m=+3.733089286,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.661424 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4992e90fb openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Started,Message:Started container 
cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.358276347 +0000 UTC m=+3.780979188,LastTimestamp:2026-03-20 15:37:54.358276347 +0000 UTC m=+3.780979188,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.667209 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4994239e4 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.359564772 +0000 UTC m=+3.782267653,LastTimestamp:2026-03-20 15:37:54.359564772 +0000 UTC m=+3.782267653,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.671306 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4aff14839 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Created,Message:Created container kube-controller-manager-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.740135993 +0000 UTC m=+4.162838874,LastTimestamp:2026-03-20 15:37:54.740135993 +0000 UTC m=+4.162838874,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.677079 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4aff2c2c9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Created,Message:Created container kube-apiserver,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.740232905 +0000 UTC m=+4.162935786,LastTimestamp:2026-03-20 15:37:54.740232905 +0000 UTC 
m=+4.162935786,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.684080 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c4affa83b9 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Created,Message:Created container etcd-ensure-env-vars,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.740741049 +0000 UTC m=+4.163443940,LastTimestamp:2026-03-20 15:37:54.740741049 +0000 UTC m=+4.163443940,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.689557 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e96c4b00d0aa0 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Created,Message:Created container kube-rbac-proxy-crio,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.741955232 +0000 UTC m=+4.164658063,LastTimestamp:2026-03-20 15:37:54.741955232 +0000 UTC m=+4.164658063,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.694009 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4b00ece51 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Created,Message:Created container kube-scheduler,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.742070865 +0000 UTC m=+4.164773746,LastTimestamp:2026-03-20 15:37:54.742070865 +0000 UTC m=+4.164773746,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.697555 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" 
event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4b126c8ce openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Started,Message:Started container kube-scheduler,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.760419534 +0000 UTC m=+4.183122395,LastTimestamp:2026-03-20 15:37:54.760419534 +0000 UTC m=+4.183122395,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.701592 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4b14cdf62 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.762915682 +0000 UTC m=+4.185618543,LastTimestamp:2026-03-20 15:37:54.762915682 +0000 UTC m=+4.185618543,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.705451 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4b164a5ff openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Started,Message:Started container kube-controller-manager-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.764473855 +0000 UTC m=+4.187176706,LastTimestamp:2026-03-20 15:37:54.764473855 +0000 UTC m=+4.187176706,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.709013 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4b17bef86 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.766000006 +0000 UTC m=+4.188702857,LastTimestamp:2026-03-20 15:37:54.766000006 +0000 UTC m=+4.188702857,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.712098 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4b1857459 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Started,Message:Started container kube-apiserver,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.766623833 +0000 UTC m=+4.189326714,LastTimestamp:2026-03-20 15:37:54.766623833 +0000 UTC m=+4.189326714,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.715763 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4b19a4dc2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.76799021 +0000 UTC m=+4.190693071,LastTimestamp:2026-03-20 15:37:54.76799021 +0000 UTC m=+4.190693071,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.720582 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c4b1afe6bd openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Started,Message:Started container etcd-ensure-env-vars,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.769405629 +0000 UTC m=+4.192108500,LastTimestamp:2026-03-20 15:37:54.769405629 +0000 UTC m=+4.192108500,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.724692 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e96c4b1d195dd openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Started,Message:Started container kube-rbac-proxy-crio,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.771613149 +0000 UTC m=+4.194316000,LastTimestamp:2026-03-20 15:37:54.771613149 +0000 UTC m=+4.194316000,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.730232 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4be2ce849 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Created,Message:Created container kube-controller-manager-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.978924617 +0000 UTC m=+4.401627458,LastTimestamp:2026-03-20 15:37:54.978924617 +0000 UTC m=+4.401627458,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.734796 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4be6fe440 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Created,Message:Created container 
kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.983314496 +0000 UTC m=+4.406017337,LastTimestamp:2026-03-20 15:37:54.983314496 +0000 UTC m=+4.406017337,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.738546 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4be7056fb openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Created,Message:Created container kube-scheduler-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.983343867 +0000 UTC m=+4.406046708,LastTimestamp:2026-03-20 15:37:54.983343867 +0000 UTC m=+4.406046708,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.743436 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4bf1b4a84 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Started,Message:Started container kube-controller-manager-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.994547332 +0000 UTC m=+4.417250173,LastTimestamp:2026-03-20 15:37:54.994547332 +0000 UTC m=+4.417250173,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.748263 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4bf492e55 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Started,Message:Started container kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.997554773 +0000 UTC m=+4.420257614,LastTimestamp:2026-03-20 15:37:54.997554773 +0000 UTC m=+4.420257614,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 
UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.752760 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4bf557257 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.998358615 +0000 UTC m=+4.421061456,LastTimestamp:2026-03-20 15:37:54.998358615 +0000 UTC m=+4.421061456,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.757475 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4bfc86532 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Started,Message:Started container kube-scheduler-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.00589189 +0000 UTC m=+4.428594731,LastTimestamp:2026-03-20 15:37:55.00589189 +0000 UTC m=+4.428594731,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.763657 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4bfda4f9c openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.007066012 +0000 UTC m=+4.429768863,LastTimestamp:2026-03-20 15:37:55.007066012 +0000 UTC m=+4.429768863,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc 
kubenswrapper[4813]: E0320 15:38:46.768622 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4ca08e59c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Created,Message:Created container kube-apiserver-cert-regeneration-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.177891228 +0000 UTC m=+4.600594069,LastTimestamp:2026-03-20 15:37:55.177891228 +0000 UTC m=+4.600594069,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.774291 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4ca397abe openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Created,Message:Created container kube-scheduler-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.181075134 +0000 UTC m=+4.603777965,LastTimestamp:2026-03-20 15:37:55.181075134 +0000 UTC m=+4.603777965,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.778921 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4cac6dce4 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Started,Message:Started container kube-apiserver-cert-regeneration-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.190340836 +0000 UTC m=+4.613043677,LastTimestamp:2026-03-20 15:37:55.190340836 +0000 UTC m=+4.613043677,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.783193 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4cad63f52 
openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.191349074 +0000 UTC m=+4.614051915,LastTimestamp:2026-03-20 15:37:55.191349074 +0000 UTC m=+4.614051915,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.787799 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e96c4caf5777d openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Started,Message:Started container kube-scheduler-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.193395069 +0000 UTC m=+4.616097910,LastTimestamp:2026-03-20 15:37:55.193395069 +0000 UTC m=+4.616097910,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.792663 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c4d2e8ae67 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.326774887 +0000 UTC m=+4.749477748,LastTimestamp:2026-03-20 15:37:55.326774887 +0000 UTC m=+4.749477748,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.799688 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4d4e2c3f3 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Created,Message:Created container kube-apiserver-insecure-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.359941619 +0000 UTC m=+4.782644460,LastTimestamp:2026-03-20 15:37:55.359941619 +0000 UTC m=+4.782644460,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.803192 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4d5d32ac1 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Started,Message:Started container kube-apiserver-insecure-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.375696577 +0000 UTC m=+4.798399418,LastTimestamp:2026-03-20 15:37:55.375696577 +0000 UTC m=+4.798399418,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.807152 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4d5e06609 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.376563721 +0000 UTC m=+4.799266562,LastTimestamp:2026-03-20 15:37:55.376563721 +0000 UTC m=+4.799266562,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.811087 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c4df22e73b openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Created,Message:Created container 
etcd-resources-copy,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.531917115 +0000 UTC m=+4.954619956,LastTimestamp:2026-03-20 15:37:55.531917115 +0000 UTC m=+4.954619956,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.812664 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c4dfefeea3 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Started,Message:Started container etcd-resources-copy,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.545353891 +0000 UTC m=+4.968056732,LastTimestamp:2026-03-20 15:37:55.545353891 +0000 UTC m=+4.968056732,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.816454 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4e0c06354 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Created,Message:Created container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.559015252 +0000 UTC m=+4.981718093,LastTimestamp:2026-03-20 15:37:55.559015252 +0000 UTC m=+4.981718093,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.822041 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4e1afa014 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Started,Message:Started container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.574693908 +0000 UTC m=+4.997396749,LastTimestamp:2026-03-20 15:37:55.574693908 +0000 UTC m=+4.997396749,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.828824 4813 event.go:359] "Server rejected event (will not retry!)" 
err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c50f5874fe openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.340733182 +0000 UTC m=+5.763436063,LastTimestamp:2026-03-20 15:37:56.340733182 +0000 UTC m=+5.763436063,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.834427 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c51b0afaf3 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Created,Message:Created container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.536982259 +0000 UTC m=+5.959685100,LastTimestamp:2026-03-20 15:37:56.536982259 +0000 UTC m=+5.959685100,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.841234 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c51bbf6250 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Started,Message:Started container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.5488052 +0000 UTC m=+5.971508051,LastTimestamp:2026-03-20 15:37:56.5488052 +0000 UTC m=+5.971508051,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.845802 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c51bcad75d openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Pulled,Message:Container image 
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.549556061 +0000 UTC m=+5.972258902,LastTimestamp:2026-03-20 15:37:56.549556061 +0000 UTC m=+5.972258902,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.850470 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c525db8ed4 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Created,Message:Created container etcd,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.718423764 +0000 UTC m=+6.141126645,LastTimestamp:2026-03-20 15:37:56.718423764 +0000 UTC m=+6.141126645,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.855797 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c52705dfe5 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Started,Message:Started container etcd,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.737974245 +0000 UTC m=+6.160677076,LastTimestamp:2026-03-20 15:37:56.737974245 +0000 UTC m=+6.160677076,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.859649 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c5271152f4 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.738724596 +0000 UTC m=+6.161427437,LastTimestamp:2026-03-20 15:37:56.738724596 +0000 UTC m=+6.161427437,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.864351 4813 event.go:359] "Server 
rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c532411e2e openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Created,Message:Created container etcd-metrics,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.92640619 +0000 UTC m=+6.349109031,LastTimestamp:2026-03-20 15:37:56.92640619 +0000 UTC m=+6.349109031,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.868443 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c5331bd866 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Started,Message:Started container etcd-metrics,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.94074071 +0000 UTC m=+6.363443551,LastTimestamp:2026-03-20 15:37:56.94074071 +0000 UTC m=+6.363443551,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.873024 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c5332b7760 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:56.941764448 +0000 UTC m=+6.364467299,LastTimestamp:2026-03-20 15:37:56.941764448 +0000 UTC m=+6.364467299,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.877387 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c53facd26d openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Created,Message:Created container 
etcd-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:57.151568493 +0000 UTC m=+6.574271364,LastTimestamp:2026-03-20 15:37:57.151568493 +0000 UTC m=+6.574271364,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.882594 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c541a56157 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Started,Message:Started container etcd-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:57.184635223 +0000 UTC m=+6.607338094,LastTimestamp:2026-03-20 15:37:57.184635223 +0000 UTC m=+6.607338094,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.887982 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c541bbea03 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:57.186112003 +0000 UTC m=+6.608814864,LastTimestamp:2026-03-20 15:37:57.186112003 +0000 UTC m=+6.608814864,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.895241 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c54e15da02 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Created,Message:Created container etcd-rev,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:57.393332738 +0000 UTC m=+6.816035569,LastTimestamp:2026-03-20 15:37:57.393332738 +0000 UTC m=+6.816035569,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.901664 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" 
in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e96c54eecadff openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Started,Message:Started container etcd-rev,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:57.407411711 +0000 UTC m=+6.830114562,LastTimestamp:2026-03-20 15:37:57.407411711 +0000 UTC m=+6.830114562,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.909380 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc7e7e3 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 15:38:46 crc kubenswrapper[4813]: body: Mar 20 15:38:46 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918072803 +0000 UTC m=+11.340775684,LastTimestamp:2026-03-20 15:38:01.918072803 +0000 UTC m=+11.340775684,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.916945 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc8e267 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918136935 +0000 UTC m=+11.340839816,LastTimestamp:2026-03-20 15:38:01.918136935 +0000 UTC m=+11.340839816,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.920812 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" 
in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-apiserver-crc.189e96c75f22b390 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:ProbeError,Message:Liveness probe error: Get "https://192.168.126.11:17697/healthz": read tcp 192.168.126.11:51138->192.168.126.11:17697: read: connection reset by peer Mar 20 15:38:46 crc kubenswrapper[4813]: body: Mar 20 15:38:46 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:06.269322128 +0000 UTC m=+15.692024979,LastTimestamp:2026-03-20 15:38:06.269322128 +0000 UTC m=+15.692024979,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.925629 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c75f237ab5 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Unhealthy,Message:Liveness probe failed: Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51138->192.168.126.11:17697: read: connection reset by peer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:06.269373109 +0000 UTC m=+15.692075950,LastTimestamp:2026-03-20 15:38:06.269373109 +0000 UTC m=+15.692075950,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.930636 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-apiserver-crc.189e96c765d1bfa5 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:ProbeError,Message:Startup probe error: HTTP probe failed with statuscode: 403 Mar 20 15:38:46 crc kubenswrapper[4813]: body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 15:38:46 crc kubenswrapper[4813]: Mar 20 15:38:46 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:06.381457317 +0000 UTC m=+15.804160168,LastTimestamp:2026-03-20 15:38:06.381457317 +0000 UTC 
m=+15.804160168,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.934461 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e96c4d5e06609\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c4d5e06609 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:55.376563721 +0000 UTC m=+4.799266562,LastTimestamp:2026-03-20 15:38:06.381488068 +0000 UTC m=+15.804190909,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.938175 4813 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c765d2919d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Unhealthy,Message:Startup probe failed: HTTP probe failed with statuscode: 403,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:06.381511069 +0000 UTC m=+15.804213920,LastTimestamp:2026-03-20 15:38:06.381511069 +0000 UTC m=+15.804213920,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.943720 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e96c765d1bfa5\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-apiserver-crc.189e96c765d1bfa5 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:ProbeError,Message:Startup probe error: HTTP probe failed with statuscode: 403 Mar 20 15:38:46 crc kubenswrapper[4813]: body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 15:38:46 crc kubenswrapper[4813]: Mar 20 15:38:46 
crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:06.381457317 +0000 UTC m=+15.804160168,LastTimestamp:2026-03-20 15:38:06.396773854 +0000 UTC m=+15.819476705,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.951418 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e96c765d2919d\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e96c765d2919d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Unhealthy,Message:Startup probe failed: HTTP probe failed with statuscode: 403,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:06.381511069 +0000 UTC m=+15.804213920,LastTimestamp:2026-03-20 15:38:06.396807225 +0000 UTC m=+15.819510076,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.957252 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc7e7e3\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc7e7e3 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 15:38:46 crc kubenswrapper[4813]: body: Mar 20 15:38:46 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918072803 +0000 UTC m=+11.340775684,LastTimestamp:2026-03-20 15:38:11.91836432 +0000 UTC m=+21.341067161,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.961874 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc8e267\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc8e267 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918136935 +0000 UTC m=+11.340839816,LastTimestamp:2026-03-20 15:38:11.918524494 +0000 UTC m=+21.341227345,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.967567 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc7e7e3\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc7e7e3 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 15:38:46 crc kubenswrapper[4813]: body: Mar 20 15:38:46 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918072803 +0000 UTC m=+11.340775684,LastTimestamp:2026-03-20 15:38:21.918299264 +0000 UTC m=+31.341002105,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.972095 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc8e267\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc8e267 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918136935 +0000 UTC m=+11.340839816,LastTimestamp:2026-03-20 15:38:21.918348555 +0000 UTC m=+31.341051396,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.977744 4813 event.go:359] 
"Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96cb040a8354 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Killing,Message:Container cluster-policy-controller failed startup probe, will be restarted,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:21.920879444 +0000 UTC m=+31.343582315,LastTimestamp:2026-03-20 15:38:21.920879444 +0000 UTC m=+31.343582315,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.983079 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c451e4ff27\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c451e4ff27 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.162272551 +0000 UTC m=+2.584975402,LastTimestamp:2026-03-20 15:38:22.038329458 +0000 UTC m=+31.461032319,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.987656 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c47e249697\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c47e249697 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Created,Message:Created container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:53.904637591 +0000 UTC m=+3.327340452,LastTimestamp:2026-03-20 15:38:22.265355123 +0000 UTC m=+31.688057974,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.991981 4813 event.go:359] "Server rejected event (will not retry!)" err="events 
\"kube-controller-manager-crc.189e96c4992e90fb\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c4992e90fb openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Started,Message:Started container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:37:54.358276347 +0000 UTC m=+3.780979188,LastTimestamp:2026-03-20 15:38:22.276447474 +0000 UTC m=+31.699150325,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:46 crc kubenswrapper[4813]: E0320 15:38:46.999319 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc7e7e3\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 15:38:46 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc7e7e3 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 15:38:46 crc kubenswrapper[4813]: body: Mar 20 15:38:46 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918072803 +0000 UTC m=+11.340775684,LastTimestamp:2026-03-20 15:38:31.918263803 +0000 UTC m=+41.340966694,Count:4,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:46 crc kubenswrapper[4813]: > Mar 20 15:38:47 crc kubenswrapper[4813]: E0320 15:38:47.004527 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc8e267\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc8e267 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918136935 +0000 UTC m=+11.340839816,LastTimestamp:2026-03-20 15:38:31.918375026 +0000 UTC 
m=+41.341077907,Count:4,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 15:38:47 crc kubenswrapper[4813]: E0320 15:38:47.010328 4813 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e96c65bc7e7e3\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 15:38:47 crc kubenswrapper[4813]: &Event{ObjectMeta:{kube-controller-manager-crc.189e96c65bc7e7e3 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 15:38:47 crc kubenswrapper[4813]: body: Mar 20 15:38:47 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:38:01.918072803 +0000 UTC m=+11.340775684,LastTimestamp:2026-03-20 15:38:41.91947186 +0000 UTC m=+51.342174791,Count:5,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:38:47 crc kubenswrapper[4813]: > Mar 20 15:38:47 crc kubenswrapper[4813]: I0320 15:38:47.201694 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:47 crc kubenswrapper[4813]: I0320 15:38:47.819268 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:47 crc kubenswrapper[4813]: I0320 15:38:47.820409 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:47 crc kubenswrapper[4813]: I0320 15:38:47.820435 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:47 crc kubenswrapper[4813]: I0320 15:38:47.820443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:47 crc kubenswrapper[4813]: I0320 15:38:47.820463 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:47 crc kubenswrapper[4813]: E0320 15:38:47.822847 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 15:38:47 crc kubenswrapper[4813]: E0320 15:38:47.823941 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.202401 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get 
resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.265798 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.267386 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.267471 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.267526 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.268335 4813 scope.go:117] "RemoveContainer" containerID="4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.595239 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.596742 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06"} Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.596856 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.597563 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.597586 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.597595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.814575 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.814734 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.815851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.815883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:48 crc kubenswrapper[4813]: I0320 15:38:48.815894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.203118 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.602073 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.602613 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.604376 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" exitCode=255 Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.604430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06"} Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.604498 4813 scope.go:117] "RemoveContainer" containerID="4f848aa2c409273a4d1629bac5db7a9cf0f7f377e0df4508402db95cdf3f33ad" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.604785 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.610769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.610807 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.610820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:49 crc kubenswrapper[4813]: I0320 15:38:49.611541 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:38:49 crc kubenswrapper[4813]: E0320 15:38:49.611755 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:50 crc kubenswrapper[4813]: I0320 15:38:50.485975 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:50 crc kubenswrapper[4813]: I0320 15:38:50.609080 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.203799 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.306749 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.306977 4813 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.308349 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.308406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.308419 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.309154 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:38:51 crc kubenswrapper[4813]: E0320 15:38:51.309403 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:51 crc kubenswrapper[4813]: E0320 15:38:51.355209 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.918313 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.919543 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.919895 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.920394 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.922552 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.922616 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.922639 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.923522 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cluster-policy-controller" containerStatusID={"Type":"cri-o","ID":"d2c6b6be5a3c802947f83131f64a56b0438a1a15b908524c54cee55759b168ce"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
containerMessage="Container cluster-policy-controller failed startup probe, will be restarted" Mar 20 15:38:51 crc kubenswrapper[4813]: I0320 15:38:51.923727 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" containerID="cri-o://d2c6b6be5a3c802947f83131f64a56b0438a1a15b908524c54cee55759b168ce" gracePeriod=30 Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.203371 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.621534 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/1.log" Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.624156 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.624984 4813 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d2c6b6be5a3c802947f83131f64a56b0438a1a15b908524c54cee55759b168ce" exitCode=255 Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.625182 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d2c6b6be5a3c802947f83131f64a56b0438a1a15b908524c54cee55759b168ce"} Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.625337 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ba1359f3d02ec309b720d41a578edee948d6fe134c2f110ccf3e20a0cb745a8e"} Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.625478 4813 scope.go:117] "RemoveContainer" containerID="a9511d1d58aa16081ae5deb6c8739f1dba7f6bb0f9dfdbf4ba666c26e9d71e47" Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.625830 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.627772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.628004 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:52 crc kubenswrapper[4813]: I0320 15:38:52.628184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:53 crc kubenswrapper[4813]: I0320 15:38:53.202911 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:53 crc kubenswrapper[4813]: I0320 15:38:53.630448 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/1.log" Mar 20 15:38:54 crc kubenswrapper[4813]: I0320 15:38:54.203078 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:54 crc kubenswrapper[4813]: I0320 15:38:54.824845 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:54 crc kubenswrapper[4813]: I0320 15:38:54.826954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:54 crc kubenswrapper[4813]: I0320 15:38:54.827054 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:54 crc kubenswrapper[4813]: I0320 15:38:54.827079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:54 crc kubenswrapper[4813]: I0320 15:38:54.827120 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:38:54 crc kubenswrapper[4813]: E0320 15:38:54.828793 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 15:38:54 crc kubenswrapper[4813]: E0320 15:38:54.828842 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.203696 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.218083 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.218451 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.220082 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.220178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.220190 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:55 crc kubenswrapper[4813]: I0320 15:38:55.220751 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:38:55 crc kubenswrapper[4813]: E0320 15:38:55.220925 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:38:56 crc kubenswrapper[4813]: I0320 15:38:56.202559 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:56 crc kubenswrapper[4813]: I0320 15:38:56.793102 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:56 crc kubenswrapper[4813]: I0320 15:38:56.793267 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:56 crc kubenswrapper[4813]: I0320 15:38:56.795005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:56 crc kubenswrapper[4813]: I0320 15:38:56.795135 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:56 crc kubenswrapper[4813]: I0320 15:38:56.795230 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:57 crc kubenswrapper[4813]: I0320 15:38:57.202734 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.203969 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.917851 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.918219 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.919407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.919546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.919795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:38:58 crc kubenswrapper[4813]: I0320 15:38:58.923269 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:38:59 crc kubenswrapper[4813]: I0320 15:38:59.202747 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:38:59 crc kubenswrapper[4813]: I0320 15:38:59.646444 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:38:59 crc 
kubenswrapper[4813]: I0320 15:38:59.647305 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:38:59 crc kubenswrapper[4813]: I0320 15:38:59.647426 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:38:59 crc kubenswrapper[4813]: I0320 15:38:59.647523 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:00 crc kubenswrapper[4813]: I0320 15:39:00.201698 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:01 crc kubenswrapper[4813]: I0320 15:39:01.200883 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:01 crc kubenswrapper[4813]: E0320 15:39:01.356396 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 15:39:01 crc kubenswrapper[4813]: I0320 15:39:01.829221 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:39:01 crc kubenswrapper[4813]: I0320 15:39:01.830502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:01 crc kubenswrapper[4813]: I0320 15:39:01.830600 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:01 crc kubenswrapper[4813]: I0320 15:39:01.830667 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:01 crc kubenswrapper[4813]: I0320 15:39:01.830742 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:39:01 crc kubenswrapper[4813]: E0320 15:39:01.834633 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 15:39:01 crc kubenswrapper[4813]: E0320 15:39:01.835047 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 15:39:02 crc kubenswrapper[4813]: I0320 15:39:02.201579 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:03 crc kubenswrapper[4813]: I0320 15:39:03.028240 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 15:39:03 crc kubenswrapper[4813]: I0320 15:39:03.045151 4813 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Mar 20 15:39:03 crc kubenswrapper[4813]: I0320 15:39:03.203001 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io 
"crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:04 crc kubenswrapper[4813]: I0320 15:39:04.202052 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:05 crc kubenswrapper[4813]: I0320 15:39:05.203210 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.202324 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.310920 4813 csr.go:261] certificate signing request csr-mn7tq is approved, waiting to be issued Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.321348 4813 csr.go:257] certificate signing request csr-mn7tq is issued Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.410346 4813 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.799514 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.799622 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.800781 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.800815 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:06 crc kubenswrapper[4813]: I0320 15:39:06.800825 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:07 crc kubenswrapper[4813]: I0320 15:39:07.048378 4813 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Mar 20 15:39:07 crc kubenswrapper[4813]: I0320 15:39:07.323581 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-11-18 02:50:44.747594465 +0000 UTC Mar 20 15:39:07 crc kubenswrapper[4813]: I0320 15:39:07.324552 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 5819h11m37.423125144s for next certificate rotation Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.265310 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.266850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.267053 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.267257 
4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.268593 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.269144 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.834763 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.837164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.837233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.837275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.837513 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.848298 4813 kubelet_node_status.go:115] "Node was previously registered" node="crc" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.848834 4813 kubelet_node_status.go:79] "Successfully registered node" node="crc" Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.848885 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": node \"crc\" not found" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.854225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.854686 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.854975 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.855258 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.855749 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:08Z","lastTransitionTime":"2026-03-20T15:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.875840 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.887956 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.888017 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.888029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.888066 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.888080 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:08Z","lastTransitionTime":"2026-03-20T15:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.903412 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.911617 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.911858 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.912075 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.912292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.912563 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:08Z","lastTransitionTime":"2026-03-20T15:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.927986 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.935714 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.935770 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.935790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.935815 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:08 crc kubenswrapper[4813]: I0320 15:39:08.935834 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:08Z","lastTransitionTime":"2026-03-20T15:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.946119 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.946759 4813 
kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:39:08 crc kubenswrapper[4813]: E0320 15:39:08.946936 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.047644 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.148994 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.249441 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.350549 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.451537 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.552961 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.653353 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.753931 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.855082 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:09 crc kubenswrapper[4813]: E0320 15:39:09.955985 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.056117 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.156643 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.170800 4813 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.259285 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.259320 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.259344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.259360 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.259371 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.362229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.362286 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.362309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.362339 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.362363 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.464996 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.465057 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.465079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.465106 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.465129 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.486232 4813 apiserver.go:52] "Watching apiserver" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490078 4813 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490326 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490764 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490804 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490841 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490865 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.490912 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.491310 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.491387 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.491415 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.491463 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.493698 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.493864 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.493953 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.494005 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.494094 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.494127 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.494316 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.495368 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.495449 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.502368 4813 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.517853 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.524040 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.536707 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539392 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539424 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539443 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539459 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539474 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539513 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539530 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539548 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539586 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539602 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539618 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539632 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539648 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539665 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539684 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539701 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539716 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539758 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539777 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539793 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539810 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539877 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539900 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539921 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539947 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539971 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.539993 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540064 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540086 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540110 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540130 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540153 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540177 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540199 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540222 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540243 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540268 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540289 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540288 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540310 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540332 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540354 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540374 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540404 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540436 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540457 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540459 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540477 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540512 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540535 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540555 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540575 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540598 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540619 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540642 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540668 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540694 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: 
\"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540717 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540739 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540760 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540780 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540799 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540820 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540858 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540880 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540900 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540919 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540940 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540962 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540984 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541006 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541026 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541050 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541099 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541126 
4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541150 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541172 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541194 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541215 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541237 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541257 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541278 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541297 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541321 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: 
\"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541345 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541364 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541384 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541408 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541432 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541452 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541473 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541516 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541535 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541557 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541577 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541599 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541643 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541662 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541683 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541703 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541725 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541746 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541768 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541790 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541811 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541829 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541851 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541874 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541896 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541937 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541959 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541984 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod 
\"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542005 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542028 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542051 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542071 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542091 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542113 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542136 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542156 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542175 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542196 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542216 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542236 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542257 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542278 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542302 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542324 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542344 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542365 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542390 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542412 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: 
\"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542437 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542460 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542497 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542522 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542544 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542565 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542586 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542604 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542623 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542645 4813 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542671 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542693 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542715 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542743 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542766 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542788 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542805 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542821 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542838 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 15:39:10 crc 
kubenswrapper[4813]: I0320 15:39:10.542854 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542872 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542889 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542910 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542934 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543011 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543036 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543060 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543086 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543110 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543134 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543158 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543183 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543214 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543242 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543265 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543288 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543311 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543335 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543352 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543368 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543384 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543401 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543418 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543442 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543464 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543519 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543540 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543558 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543592 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543629 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543647 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543664 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543689 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543705 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543721 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543739 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543757 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 15:39:10 crc 
kubenswrapper[4813]: I0320 15:39:10.543774 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543792 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543809 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543826 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543896 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543920 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543943 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543965 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543985 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 
15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544003 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544023 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544043 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544060 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544079 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544097 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544116 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544134 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544152 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" 
(UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544201 4813 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544217 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544228 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540503 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540620 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540867 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.540879 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541062 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541091 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541107 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541134 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541224 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541382 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541475 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541701 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541756 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.541989 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542013 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542027 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542161 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542276 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542310 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542367 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542532 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542697 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542899 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542920 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.542935 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543157 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543175 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543396 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543423 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543560 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543611 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543798 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.543830 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544115 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544213 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.547785 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544391 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544424 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544431 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544648 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544637 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544828 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.544862 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545171 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545233 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545251 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545263 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545282 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545345 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545601 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.545673 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546007 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546083 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546193 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546193 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546385 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546898 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.546933 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.547369 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.547954 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:39:11.047905811 +0000 UTC m=+80.470608652 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.547970 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548006 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548145 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548301 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548400 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548449 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548463 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548615 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548789 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548826 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548874 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548919 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548938 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549091 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549187 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549279 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549453 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549549 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549555 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.549847 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.548446 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550051 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550173 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550229 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550259 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550326 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550688 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550725 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.550838 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551224 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551373 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551442 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551790 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551826 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551814 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553610 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552963 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.551945 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552108 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552140 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552207 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552441 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552441 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552555 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552594 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552652 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552675 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.552766 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553808 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553828 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553278 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553288 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553330 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553377 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553865 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.553963 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.554298 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.554389 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.554400 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.554447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.554823 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.554870 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555079 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555207 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555322 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555548 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555699 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555836 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555942 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.555974 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556459 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556496 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556584 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556712 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556791 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556872 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556898 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556905 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.556935 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.557060 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.557259 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.557425 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.557463 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.557639 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.557680 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.558283 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.558460 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.558770 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.558947 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.559126 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.559143 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.558875 4813 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.559319 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560274 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560274 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560327 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560497 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560512 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560696 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.560731 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.560878 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.560960 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:11.060938254 +0000 UTC m=+80.483641175 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.561035 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.561236 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.561412 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.561668 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:11.061653982 +0000 UTC m=+80.484356933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.561769 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.562069 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.562325 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.563220 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.567532 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.568085 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.568124 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.568143 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.568170 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.568190 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.572311 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.573928 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.574842 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.575387 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.575596 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.575641 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.575931 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.576163 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.576276 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.576498 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.576698 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.577515 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.577965 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.578036 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.582023 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.582270 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.582468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.582553 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.584770 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.584821 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.585546 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.585574 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.585620 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.586214 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.587807 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.587836 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.587850 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.587864 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.587911 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:11.087894284 +0000 UTC m=+80.510597135 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.587940 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.587982 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.587979 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.588008 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.588100 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:11.088068488 +0000 UTC m=+80.510771409 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.589555 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.593277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.593909 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.594672 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.600218 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.604843 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.605411 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.612565 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.612645 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.617046 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.622120 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.632191 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644596 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644703 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644809 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644840 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644860 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644878 4813 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644897 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath 
\"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644895 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644915 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644933 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.644967 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645008 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645091 4813 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645114 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645139 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645180 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645208 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645222 4813 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645237 4813 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645250 4813 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645263 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645275 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645287 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645311 4813 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645329 4813 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645347 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645363 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645374 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645386 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645397 4813 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645408 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645419 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645431 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645442 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645454 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645465 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645476 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645510 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645522 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645533 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645545 4813 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645557 4813 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645569 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645582 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645594 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" 
(UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645606 4813 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645618 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645629 4813 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645642 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645654 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645667 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645680 4813 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645694 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645705 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645717 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645728 4813 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645739 4813 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645751 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645762 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645773 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645784 4813 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645795 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645808 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645820 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645832 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645844 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645856 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645869 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645880 4813 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645891 4813 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645905 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645916 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645927 4813 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645940 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645953 4813 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645964 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645974 4813 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645986 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.645997 4813 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646008 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646019 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646031 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646042 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646054 4813 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on 
node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646064 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646076 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646087 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646098 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646108 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646120 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646131 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646142 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646153 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646164 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646174 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646187 4813 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646198 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 
15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646211 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646223 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646235 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646246 4813 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646258 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646269 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646280 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646292 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646306 4813 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646322 4813 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646337 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646349 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646361 4813 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" 
Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646372 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646384 4813 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646396 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646408 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646419 4813 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646431 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646442 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646455 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646467 4813 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646499 4813 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646511 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646523 4813 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646535 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646547 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646559 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646571 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646582 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646594 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646605 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646616 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646628 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646640 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646652 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646666 4813 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646680 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646693 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646707 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646720 4813 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646734 4813 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646747 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646761 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646774 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646787 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646800 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646812 4813 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646825 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646838 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646850 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646863 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646876 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646889 4813 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646901 4813 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646914 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646927 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646939 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646954 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646966 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.646999 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647010 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647021 4813 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647032 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647044 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647055 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647067 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647078 4813 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647089 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647100 4813 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647113 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647125 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647136 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647148 4813 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647159 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647171 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647182 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647193 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647204 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647215 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647227 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647239 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647250 4813 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647262 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647273 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647285 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647296 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647311 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647327 4813 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647343 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647361 4813 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647373 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647385 4813 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647396 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647407 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647418 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647430 4813 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.647440 4813 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.671088 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.671121 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.671133 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.671149 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.671162 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.774047 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.774295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.774321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.774351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.774371 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.805250 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.814523 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.819062 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.859840 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:39:10 crc kubenswrapper[4813]: container &Container{Name:network-operator,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,Command:[/bin/bash -c #!/bin/bash Mar 20 15:39:10 crc kubenswrapper[4813]: set -o allexport Mar 20 15:39:10 crc kubenswrapper[4813]: if [[ -f /etc/kubernetes/apiserver-url.env ]]; then Mar 20 15:39:10 crc kubenswrapper[4813]: source /etc/kubernetes/apiserver-url.env Mar 20 15:39:10 crc kubenswrapper[4813]: else Mar 20 15:39:10 crc kubenswrapper[4813]: echo "Error: /etc/kubernetes/apiserver-url.env is missing" Mar 20 15:39:10 crc kubenswrapper[4813]: exit 1 Mar 20 15:39:10 crc kubenswrapper[4813]: fi Mar 20 15:39:10 crc kubenswrapper[4813]: exec /usr/bin/cluster-network-operator start --listen=0.0.0.0:9104 Mar 20 15:39:10 crc kubenswrapper[4813]: 
],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:cno,HostPort:9104,ContainerPort:9104,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:RELEASE_VERSION,Value:4.18.1,ValueFrom:nil,},EnvVar{Name:KUBE_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b97554198294bf544fbc116c94a0a1fb2ec8a4de0e926bf9d9e320135f0bee6f,ValueFrom:nil,},EnvVar{Name:KUBE_RBAC_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09,ValueFrom:nil,},EnvVar{Name:MULTUS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26,ValueFrom:nil,},EnvVar{Name:MULTUS_ADMISSION_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317,ValueFrom:nil,},EnvVar{Name:CNI_PLUGINS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc,ValueFrom:nil,},EnvVar{Name:BOND_CNI_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78,ValueFrom:nil,},EnvVar{Name:WHEREABOUTS_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4,ValueFrom:nil,},EnvVar{Name:ROUTE_OVERRRIDE_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa,ValueFrom:nil,},EnvVar{Name:MULTUS_NETWORKPOLICY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:23f833d3738d68706eb2f2868bd76bd71cee016cffa6faf5f045a60cc8c6eddd,ValueFrom:nil,},EnvVar{Name:OVN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,ValueFrom:nil,},EnvVar{Name:OVN_NB_RAFT_ELECTION_TIMER,Value:10,ValueFrom:nil,},EnvVar{Name:OVN_SB_RAFT_ELECTION_TIMER,Value:16,ValueFrom:nil,},EnvVar{Name:OVN_NORTHD_PROBE_INTERVAL,Value:10000,ValueFrom:nil,},EnvVar{Name:OVN_CONTROLLER_INACTIVITY_PROBE,Value:180000,ValueFrom:nil,},EnvVar{Name:OVN_NB_INACTIVITY_PROBE,Value:60000,ValueFrom:nil,},EnvVar{Name:EGRESS_ROUTER_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c,ValueFrom:nil,},EnvVar{Name:NETWORK_METRICS_DAEMON_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_SOURCE_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_TARGET_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_OPERATOR_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:CLOUD_NETWORK_CONFIG_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8048f1cb0be521f09749c0a489503cd56d85b68c6ca93380e082cfd693cd97a8,ValueFrom:nil,},EnvVar{Name:CLI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,ValueFrom:nil,},EnvVar{Name:FRR_K8S_IMAGE,Value:quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:5dbf844e49bb46b78586930149e5e5f5dc121014c8afd10fe36f3651967cc256,ValueFrom:nil,},EnvVar{Name:NETWORKING_CONSOLE_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd,ValueFrom:nil,},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host-etc-kube,ReadOnly:true,MountPath:/etc/kubernetes,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-tls,ReadOnly:false,MountPath:/var/run/secrets/serving-cert,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rdwmf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-operator-58b4c7f79c-55gtf_openshift-network-operator(37a5e44f-9a88-4405-be8a-b645485e7312): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Mar 20 15:39:10 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.861950 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"network-operator\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" podUID="37a5e44f-9a88-4405-be8a-b645485e7312" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.862315 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:39:10 crc kubenswrapper[4813]: container &Container{Name:webhook,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Mar 20 15:39:10 crc kubenswrapper[4813]: if [[ -f "/env/_master" ]]; then Mar 20 15:39:10 crc kubenswrapper[4813]: set -o allexport Mar 20 15:39:10 crc kubenswrapper[4813]: source "/env/_master" Mar 20 15:39:10 crc kubenswrapper[4813]: set +o allexport Mar 20 15:39:10 crc kubenswrapper[4813]: fi Mar 20 15:39:10 crc kubenswrapper[4813]: # OVN-K will try to remove hybrid overlay node annotations even when the hybrid overlay is not enabled. 
Mar 20 15:39:10 crc kubenswrapper[4813]: # https://github.com/ovn-org/ovn-kubernetes/blob/ac6820df0b338a246f10f412cd5ec903bd234694/go-controller/pkg/ovn/master.go#L791 Mar 20 15:39:10 crc kubenswrapper[4813]: ho_enable="--enable-hybrid-overlay" Mar 20 15:39:10 crc kubenswrapper[4813]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start webhook" Mar 20 15:39:10 crc kubenswrapper[4813]: # extra-allowed-user: service account `ovn-kubernetes-control-plane` Mar 20 15:39:10 crc kubenswrapper[4813]: # sets pod annotations in multi-homing layer3 network controller (cluster-manager) Mar 20 15:39:10 crc kubenswrapper[4813]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Mar 20 15:39:10 crc kubenswrapper[4813]: --webhook-cert-dir="/etc/webhook-cert" \ Mar 20 15:39:10 crc kubenswrapper[4813]: --webhook-host=127.0.0.1 \ Mar 20 15:39:10 crc kubenswrapper[4813]: --webhook-port=9743 \ Mar 20 15:39:10 crc kubenswrapper[4813]: ${ho_enable} \ Mar 20 15:39:10 crc kubenswrapper[4813]: --enable-interconnect \ Mar 20 15:39:10 crc kubenswrapper[4813]: --disable-approver \ Mar 20 15:39:10 crc kubenswrapper[4813]: --extra-allowed-user="system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane" \ Mar 20 15:39:10 crc kubenswrapper[4813]: --wait-for-kubernetes-api=200s \ Mar 20 15:39:10 crc kubenswrapper[4813]: --pod-admission-conditions="/var/run/ovnkube-identity-config/additional-pod-admission-cond.json" \ Mar 20 15:39:10 crc kubenswrapper[4813]: --loglevel="${LOGLEVEL}" Mar 20 15:39:10 crc kubenswrapper[4813]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:KUBERNETES_NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/etc/webhook-cert/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct 
envvars Mar 20 15:39:10 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:39:10 crc kubenswrapper[4813]: W0320 15:39:10.862959 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-df27dbc88e6cefcc72232145ab790aeb3ce69664d101def63da2c88c2669aead WatchSource:0}: Error finding container df27dbc88e6cefcc72232145ab790aeb3ce69664d101def63da2c88c2669aead: Status 404 returned error can't find the container with id df27dbc88e6cefcc72232145ab790aeb3ce69664d101def63da2c88c2669aead Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.865757 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:iptables-alerter,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/iptables-alerter/iptables-alerter.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONTAINER_RUNTIME_ENDPOINT,Value:unix:///run/crio/crio.sock,ValueFrom:nil,},EnvVar{Name:ALERTER_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{68157440 0} {} 65Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:iptables-alerter-script,ReadOnly:false,MountPath:/iptables-alerter,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:host-slash,ReadOnly:true,MountPath:/host,SubPath:,MountPropagation:*HostToContainer,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rczfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod iptables-alerter-4ln5h_openshift-network-operator(d75a4c96-2883-4a0b-bab2-0fab2b6c0b49): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars" logger="UnhandledError" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.865644 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:39:10 crc kubenswrapper[4813]: container &Container{Name:approver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Mar 20 15:39:10 crc kubenswrapper[4813]: if [[ -f "/env/_master" ]]; then Mar 20 15:39:10 crc kubenswrapper[4813]: set -o allexport Mar 20 15:39:10 crc kubenswrapper[4813]: source "/env/_master" Mar 20 15:39:10 crc kubenswrapper[4813]: set +o allexport Mar 20 15:39:10 crc kubenswrapper[4813]: fi Mar 20 15:39:10 crc kubenswrapper[4813]: Mar 20 15:39:10 crc 
kubenswrapper[4813]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start approver" Mar 20 15:39:10 crc kubenswrapper[4813]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Mar 20 15:39:10 crc kubenswrapper[4813]: --disable-webhook \ Mar 20 15:39:10 crc kubenswrapper[4813]: --csr-acceptance-conditions="/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json" \ Mar 20 15:39:10 crc kubenswrapper[4813]: --loglevel="${LOGLEVEL}" Mar 20 15:39:10 crc kubenswrapper[4813]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:4,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Mar 20 15:39:10 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.867031 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"webhook\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\", failed to \"StartContainer\" for \"approver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"]" pod="openshift-network-node-identity/network-node-identity-vrzqb" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" Mar 20 15:39:10 crc kubenswrapper[4813]: E0320 15:39:10.867077 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"iptables-alerter\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/iptables-alerter-4ln5h" podUID="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.876952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.877017 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc 
kubenswrapper[4813]: I0320 15:39:10.877041 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.877069 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.877092 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.980123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.980189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.980205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.980224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:10 crc kubenswrapper[4813]: I0320 15:39:10.980238 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:10Z","lastTransitionTime":"2026-03-20T15:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.051411 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.051763 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:39:12.051720856 +0000 UTC m=+81.474423737 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.083039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.083091 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.083100 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.083118 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.083128 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.152372 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.152445 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.152536 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.152573 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152642 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152647 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152686 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152702 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:12.152688469 +0000 UTC m=+81.575391310 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152706 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152728 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152743 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:12.15272645 +0000 UTC m=+81.575429331 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152787 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152849 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152875 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152812 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:12.152785461 +0000 UTC m=+81.575488342 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.152970 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:12.152950675 +0000 UTC m=+81.575653556 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.186441 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.186542 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.186563 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.186593 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.186621 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.272619 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.273904 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.276529 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.277048 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.277904 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.280387 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" 
path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.281829 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.283196 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.285546 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.287802 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.290336 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.291443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.291520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.291540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.291562 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.291580 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.291912 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.292060 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.294177 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.294901 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.295944 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.297754 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.298967 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.301994 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.303420 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.304910 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.305820 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.308632 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.310067 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.311736 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.314051 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.316093 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.318242 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.319803 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.320076 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.322249 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.324921 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.326307 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.328256 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.329283 4813 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.329531 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.334309 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.334896 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.335380 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.336290 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.341062 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Mar 20 
15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.343471 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.344559 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.346635 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.348350 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.350874 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.351992 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.354207 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.355136 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.356288 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.356775 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.357880 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.358386 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.359654 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.360122 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.360956 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Mar 20 15:39:11 crc 
kubenswrapper[4813]: I0320 15:39:11.361420 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.362025 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.363166 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.364958 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.365064 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.393257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.393323 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.393341 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.393365 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.393383 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.496425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.496569 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.496596 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.496628 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.496655 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.599185 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.599224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.599233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.599254 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.599264 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.684515 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8642e28ef2973496d217d37a0c7c23db64e614f2c1dae003bdd2eb3c0e8857b6"} Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.686543 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:39:11 crc kubenswrapper[4813]: container &Container{Name:network-operator,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,Command:[/bin/bash -c #!/bin/bash Mar 20 15:39:11 crc kubenswrapper[4813]: set -o allexport Mar 20 15:39:11 crc kubenswrapper[4813]: if [[ -f /etc/kubernetes/apiserver-url.env ]]; then Mar 20 15:39:11 crc kubenswrapper[4813]: source /etc/kubernetes/apiserver-url.env Mar 20 15:39:11 crc kubenswrapper[4813]: else Mar 20 15:39:11 crc kubenswrapper[4813]: echo "Error: /etc/kubernetes/apiserver-url.env is missing" Mar 20 15:39:11 crc kubenswrapper[4813]: exit 1 Mar 20 15:39:11 crc kubenswrapper[4813]: fi Mar 20 15:39:11 crc kubenswrapper[4813]: exec /usr/bin/cluster-network-operator start --listen=0.0.0.0:9104 Mar 20 15:39:11 crc kubenswrapper[4813]: 
],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:cno,HostPort:9104,ContainerPort:9104,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:RELEASE_VERSION,Value:4.18.1,ValueFrom:nil,},EnvVar{Name:KUBE_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b97554198294bf544fbc116c94a0a1fb2ec8a4de0e926bf9d9e320135f0bee6f,ValueFrom:nil,},EnvVar{Name:KUBE_RBAC_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09,ValueFrom:nil,},EnvVar{Name:MULTUS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26,ValueFrom:nil,},EnvVar{Name:MULTUS_ADMISSION_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317,ValueFrom:nil,},EnvVar{Name:CNI_PLUGINS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc,ValueFrom:nil,},EnvVar{Name:BOND_CNI_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78,ValueFrom:nil,},EnvVar{Name:WHEREABOUTS_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4,ValueFrom:nil,},EnvVar{Name:ROUTE_OVERRRIDE_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa,ValueFrom:nil,},EnvVar{Name:MULTUS_NETWORKPOLICY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:23f833d3738d68706eb2f2868bd76bd71cee016cffa6faf5f045a60cc8c6eddd,ValueFrom:nil,},EnvVar{Name:OVN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,ValueFrom:nil,},EnvVar{Name:OVN_NB_RAFT_ELECTION_TIMER,Value:10,ValueFrom:nil,},EnvVar{Name:OVN_SB_RAFT_ELECTION_TIMER,Value:16,ValueFrom:nil,},EnvVar{Name:OVN_NORTHD_PROBE_INTERVAL,Value:10000,ValueFrom:nil,},EnvVar{Name:OVN_CONTROLLER_INACTIVITY_PROBE,Value:180000,ValueFrom:nil,},EnvVar{Name:OVN_NB_INACTIVITY_PROBE,Value:60000,ValueFrom:nil,},EnvVar{Name:EGRESS_ROUTER_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c,ValueFrom:nil,},EnvVar{Name:NETWORK_METRICS_DAEMON_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_SOURCE_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_TARGET_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_OPERATOR_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:CLOUD_NETWORK_CONFIG_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8048f1cb0be521f09749c0a489503cd56d85b68c6ca93380e082cfd693cd97a8,ValueFrom:nil,},EnvVar{Name:CLI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,ValueFrom:nil,},EnvVar{Name:FRR_K8S_IMAGE,Value:quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:5dbf844e49bb46b78586930149e5e5f5dc121014c8afd10fe36f3651967cc256,ValueFrom:nil,},EnvVar{Name:NETWORKING_CONSOLE_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd,ValueFrom:nil,},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host-etc-kube,ReadOnly:true,MountPath:/etc/kubernetes,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-tls,ReadOnly:false,MountPath:/var/run/secrets/serving-cert,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rdwmf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-operator-58b4c7f79c-55gtf_openshift-network-operator(37a5e44f-9a88-4405-be8a-b645485e7312): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Mar 20 15:39:11 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.687448 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5ebf1fd6bc574247a3d005adfc06c6b9a4546f241a33b97122d3244821af8306"} Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.687840 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"network-operator\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" podUID="37a5e44f-9a88-4405-be8a-b645485e7312" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.688520 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"df27dbc88e6cefcc72232145ab790aeb3ce69664d101def63da2c88c2669aead"} Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.690627 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:39:11 crc kubenswrapper[4813]: container &Container{Name:webhook,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Mar 20 15:39:11 crc kubenswrapper[4813]: if [[ -f "/env/_master" ]]; then Mar 20 15:39:11 crc kubenswrapper[4813]: set -o allexport Mar 20 15:39:11 crc kubenswrapper[4813]: source "/env/_master" Mar 20 15:39:11 crc kubenswrapper[4813]: set +o allexport Mar 20 15:39:11 crc kubenswrapper[4813]: fi Mar 20 15:39:11 crc 
kubenswrapper[4813]: # OVN-K will try to remove hybrid overlay node annotations even when the hybrid overlay is not enabled. Mar 20 15:39:11 crc kubenswrapper[4813]: # https://github.com/ovn-org/ovn-kubernetes/blob/ac6820df0b338a246f10f412cd5ec903bd234694/go-controller/pkg/ovn/master.go#L791 Mar 20 15:39:11 crc kubenswrapper[4813]: ho_enable="--enable-hybrid-overlay" Mar 20 15:39:11 crc kubenswrapper[4813]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start webhook" Mar 20 15:39:11 crc kubenswrapper[4813]: # extra-allowed-user: service account `ovn-kubernetes-control-plane` Mar 20 15:39:11 crc kubenswrapper[4813]: # sets pod annotations in multi-homing layer3 network controller (cluster-manager) Mar 20 15:39:11 crc kubenswrapper[4813]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Mar 20 15:39:11 crc kubenswrapper[4813]: --webhook-cert-dir="/etc/webhook-cert" \ Mar 20 15:39:11 crc kubenswrapper[4813]: --webhook-host=127.0.0.1 \ Mar 20 15:39:11 crc kubenswrapper[4813]: --webhook-port=9743 \ Mar 20 15:39:11 crc kubenswrapper[4813]: ${ho_enable} \ Mar 20 15:39:11 crc kubenswrapper[4813]: --enable-interconnect \ Mar 20 15:39:11 crc kubenswrapper[4813]: --disable-approver \ Mar 20 15:39:11 crc kubenswrapper[4813]: --extra-allowed-user="system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane" \ Mar 20 15:39:11 crc kubenswrapper[4813]: --wait-for-kubernetes-api=200s \ Mar 20 15:39:11 crc kubenswrapper[4813]: --pod-admission-conditions="/var/run/ovnkube-identity-config/additional-pod-admission-cond.json" \ Mar 20 15:39:11 crc kubenswrapper[4813]: --loglevel="${LOGLEVEL}" Mar 20 15:39:11 crc kubenswrapper[4813]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:KUBERNETES_NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/etc/webhook-cert/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Mar 20 15:39:11 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.690600 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:iptables-alerter,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,Command:[/iptables-alerter/iptables-alerter.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONTAINER_RUNTIME_ENDPOINT,Value:unix:///run/crio/crio.sock,ValueFrom:nil,},EnvVar{Name:ALERTER_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{68157440 0} {} 65Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:iptables-alerter-script,ReadOnly:false,MountPath:/iptables-alerter,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:host-slash,ReadOnly:true,MountPath:/host,SubPath:,MountPropagation:*HostToContainer,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rczfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod iptables-alerter-4ln5h_openshift-network-operator(d75a4c96-2883-4a0b-bab2-0fab2b6c0b49): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars" logger="UnhandledError" Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.693802 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"iptables-alerter\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/iptables-alerter-4ln5h" podUID="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.696193 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:39:11 crc kubenswrapper[4813]: container &Container{Name:approver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe Mar 20 15:39:11 crc kubenswrapper[4813]: if [[ -f "/env/_master" ]]; then Mar 20 15:39:11 crc kubenswrapper[4813]: set -o allexport Mar 20 15:39:11 crc kubenswrapper[4813]: source "/env/_master" Mar 20 15:39:11 crc kubenswrapper[4813]: set +o allexport Mar 20 15:39:11 crc kubenswrapper[4813]: fi Mar 20 15:39:11 crc 
kubenswrapper[4813]: Mar 20 15:39:11 crc kubenswrapper[4813]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start approver" Mar 20 15:39:11 crc kubenswrapper[4813]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \ Mar 20 15:39:11 crc kubenswrapper[4813]: --disable-webhook \ Mar 20 15:39:11 crc kubenswrapper[4813]: --csr-acceptance-conditions="/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json" \ Mar 20 15:39:11 crc kubenswrapper[4813]: --loglevel="${LOGLEVEL}" Mar 20 15:39:11 crc kubenswrapper[4813]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:4,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars Mar 20 15:39:11 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:39:11 crc kubenswrapper[4813]: E0320 15:39:11.697340 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"webhook\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\", failed to \"StartContainer\" for \"approver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"]" pod="openshift-network-node-identity/network-node-identity-vrzqb" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.702128 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.702228 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.702273 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.702292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.702316 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.702335 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.715981 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.732132 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.746346 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.754564 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.762039 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\
\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.772703 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.789738 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.804114 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.806636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.806795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.806816 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.806843 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.806872 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.818394 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.833559 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.845516 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83
e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.858179 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.867987 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.910872 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.910961 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.910975 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.911002 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:11 crc kubenswrapper[4813]: I0320 15:39:11.911017 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:11Z","lastTransitionTime":"2026-03-20T15:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.013691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.013727 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.013734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.013747 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.013756 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.062162 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.062174 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:39:14.062155911 +0000 UTC m=+83.484858752 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.116625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.116685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.116698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.116718 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.116730 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.163326 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.163416 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.163453 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.163519 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163612 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163649 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163717 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:14.163695538 +0000 UTC m=+83.586398379 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163720 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163762 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163788 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163800 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:14.163732159 +0000 UTC m=+83.586435100 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.163860 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:14.163834931 +0000 UTC m=+83.586537812 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.164232 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.164249 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.164260 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.164314 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:14.164302393 +0000 UTC m=+83.587005354 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.220336 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.220400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.220415 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.220435 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.220447 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.264775 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.264874 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.264874 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.265391 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.265504 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:12 crc kubenswrapper[4813]: E0320 15:39:12.265372 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.322881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.322957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.322969 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.322989 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.323030 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.426111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.426169 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.426187 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.426211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.426227 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.529042 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.529107 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.529120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.529139 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.529151 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.546086 4813 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.632202 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.632290 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.632308 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.632339 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.632362 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.735438 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.735509 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.735521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.735540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.735552 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.838824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.838925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.838954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.838988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.839013 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.924332 4813 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.942064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.942126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.942143 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.942166 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:12 crc kubenswrapper[4813]: I0320 15:39:12.942185 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:12Z","lastTransitionTime":"2026-03-20T15:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.045278 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.045352 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.045374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.045404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.045426 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.148313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.148794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.148944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.149068 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.149203 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.252874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.253113 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.253258 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.253366 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.253513 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.356441 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.356575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.356601 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.356633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.356657 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.459178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.459216 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.459230 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.459253 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.459268 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.562588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.562875 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.563002 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.563093 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.563223 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.666279 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.666371 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.666390 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.666420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.666442 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.773398 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.773462 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.773493 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.773509 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.773547 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.877185 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.877246 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.877262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.877287 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.877345 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.981197 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.981274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.981298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.981331 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:13 crc kubenswrapper[4813]: I0320 15:39:13.981351 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:13Z","lastTransitionTime":"2026-03-20T15:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.082978 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.083431 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:39:18.08338596 +0000 UTC m=+87.506088841 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.085027 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.085089 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.085107 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.085132 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.085155 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.184385 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.184462 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.184545 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.184586 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184761 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184799 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184826 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184835 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184844 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184868 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184888 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184884 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:18.184857305 +0000 UTC m=+87.607560186 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184971 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.184988 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:18.184953798 +0000 UTC m=+87.607656699 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.185304 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:18.185088441 +0000 UTC m=+87.607791312 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.185327 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:18.185315227 +0000 UTC m=+87.608018098 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.187625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.187696 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.187721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.187750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.187770 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.265919 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.266101 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.266316 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.266382 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.266569 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:14 crc kubenswrapper[4813]: E0320 15:39:14.266756 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.291257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.291325 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.291343 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.291370 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.291391 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.394225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.394281 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.394299 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.394321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.394339 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.497982 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.498050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.498070 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.498098 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.498119 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.600310 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.600367 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.600380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.600397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.600409 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.702915 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.702966 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.702982 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.703005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.703021 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.805174 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.805215 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.805225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.805240 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.805251 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.907687 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.907720 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.907748 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.907763 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:14 crc kubenswrapper[4813]: I0320 15:39:14.907775 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:14Z","lastTransitionTime":"2026-03-20T15:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.010575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.010635 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.010652 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.010676 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.010692 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.113914 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.113965 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.113977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.113992 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.114008 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.216146 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.216234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.216247 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.216263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.216275 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.318469 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.318560 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.318583 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.318611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.318633 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.421321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.421395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.421420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.421453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.421513 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.524144 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.524201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.524259 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.524284 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.524301 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.627164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.627268 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.627291 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.627320 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.627342 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.729729 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.729799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.729816 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.729839 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.729858 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.833434 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.833554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.833588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.833619 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.833641 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.936768 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.936837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.936854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.936880 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:15 crc kubenswrapper[4813]: I0320 15:39:15.936901 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:15Z","lastTransitionTime":"2026-03-20T15:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.040138 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.040233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.040251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.040306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.040328 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.143583 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.143643 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.143660 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.143684 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.143701 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.247059 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.247147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.247175 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.247205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.247229 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.265013 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.265071 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.265093 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:16 crc kubenswrapper[4813]: E0320 15:39:16.265251 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:16 crc kubenswrapper[4813]: E0320 15:39:16.265406 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:16 crc kubenswrapper[4813]: E0320 15:39:16.265544 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.350124 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.350198 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.350216 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.350239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.350256 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.453180 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.453258 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.453284 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.453317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.453344 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.555663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.555729 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.555746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.555769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.555785 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.659385 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.659456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.659515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.659549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.660120 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.763948 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.764010 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.764028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.764055 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.764073 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.866799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.866864 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.866882 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.866906 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.866924 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.969668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.969719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.969728 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.969742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:16 crc kubenswrapper[4813]: I0320 15:39:16.969757 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:16Z","lastTransitionTime":"2026-03-20T15:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.073156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.073200 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.073212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.073229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.073242 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.175930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.175975 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.176009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.176028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.176043 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.278323 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.278392 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.278410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.278432 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.278449 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.381539 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.381595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.381605 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.381623 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.381635 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.484622 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.484690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.484709 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.484805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.484863 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.588297 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.588360 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.588372 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.588388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.588400 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.691408 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.691531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.691557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.691586 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.691605 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.794184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.794246 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.794265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.794292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.794311 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.896746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.896778 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.896787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.896800 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.896808 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.999731 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.999811 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.999830 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:17 crc kubenswrapper[4813]: I0320 15:39:17.999854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:17.999871 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:17Z","lastTransitionTime":"2026-03-20T15:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.103359 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.103407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.103416 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.103431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.103440 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.124071 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.124259 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:39:26.124220586 +0000 UTC m=+95.546923467 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.206522 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.206568 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.206579 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.206597 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.206609 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.225453 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.225566 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.225606 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.225644 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225720 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225734 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225790 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225804 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225816 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225762 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225905 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:26.225831705 +0000 UTC m=+95.648534586 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225915 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225925 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225947 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:26.225928567 +0000 UTC m=+95.648631448 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.225982 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:26.225963308 +0000 UTC m=+95.648666179 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.226022 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:26.225996219 +0000 UTC m=+95.648699090 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.265522 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.265542 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.265703 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.265543 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.265761 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:18 crc kubenswrapper[4813]: E0320 15:39:18.265828 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.308876 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.308915 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.308928 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.308944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.308956 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.412845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.412906 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.412921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.412968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.412982 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.515830 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.515877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.515886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.515900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.515909 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.619509 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.619573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.619591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.619617 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.619635 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.728446 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.728551 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.728571 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.728598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.728620 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.831887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.831929 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.831940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.831958 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.831970 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.934738 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.934801 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.934818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.934843 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:18 crc kubenswrapper[4813]: I0320 15:39:18.934862 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:18Z","lastTransitionTime":"2026-03-20T15:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.037934 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.037986 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.038002 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.038025 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.038042 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.050262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.050314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.050331 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.050355 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.050374 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.067327 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.071154 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.071210 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.071227 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.071251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.071279 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.085726 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.089451 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.089503 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.089513 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.089526 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.089535 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.101530 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.107550 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.107640 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.107665 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.107696 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.107720 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.124839 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.128736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.128791 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.128805 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.128823 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.128835 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.140126 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.140410 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.142270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.142328 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.142348 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.142565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.142592 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.246177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.246224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.246240 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.246263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.246279 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.280065 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.280345 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.283429 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.348714 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.348789 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.348804 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.348855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.348872 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.451428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.451470 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.451502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.451524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.451537 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.554407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.554546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.554604 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.554646 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.554664 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.657638 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.657693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.657704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.657719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.657728 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.712311 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:39:19 crc kubenswrapper[4813]: E0320 15:39:19.712634 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.760752 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.760824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.760838 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.760860 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.760875 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.865576 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.865640 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.865651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.865675 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.865687 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.968973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.969018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.969030 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.969043 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:19 crc kubenswrapper[4813]: I0320 15:39:19.969054 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:19Z","lastTransitionTime":"2026-03-20T15:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.072544 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.072605 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.072618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.072636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.072650 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.175406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.175505 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.175526 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.175552 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.175573 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.265786 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.265817 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.265961 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:20 crc kubenswrapper[4813]: E0320 15:39:20.266069 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:20 crc kubenswrapper[4813]: E0320 15:39:20.266226 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:20 crc kubenswrapper[4813]: E0320 15:39:20.266470 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.279741 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.279783 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.279799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.279824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.279839 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.295715 4813 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.382977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.383296 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.383518 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.383740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.383937 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.486765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.486820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.486837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.486859 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.486877 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.589711 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.589829 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.589857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.589888 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.589913 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.694051 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.694453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.694775 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.695026 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.695217 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.798560 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.798880 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.799017 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.799238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.799411 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.902269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.902667 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.902878 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.902982 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:20 crc kubenswrapper[4813]: I0320 15:39:20.903066 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:20Z","lastTransitionTime":"2026-03-20T15:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.004861 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.004893 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.004903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.004916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.004925 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.107730 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.107799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.107838 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.107867 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.107890 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.210401 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.210457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.210474 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.210539 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.210563 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.277652 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c4274
5f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.292801 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.302136 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.316616 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.316850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.316999 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.317126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.317164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.317184 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.327752 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.340156 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.353026 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.365432 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.419781 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.419826 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.419844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.419867 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.419885 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.522344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.522688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.522754 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.522827 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.522888 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.626031 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.626377 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.626709 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.626925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.627123 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.729540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.729604 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.729625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.729654 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.729672 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.833265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.833355 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.833388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.833418 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.833442 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.936636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.936677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.936689 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.936706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:21 crc kubenswrapper[4813]: I0320 15:39:21.936717 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:21Z","lastTransitionTime":"2026-03-20T15:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.039644 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.039706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.039724 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.039748 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.039766 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.142594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.143027 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.143161 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.143313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.143454 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.247423 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.248526 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.248777 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.249000 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.249211 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.265338 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.265391 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.265339 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:22 crc kubenswrapper[4813]: E0320 15:39:22.265551 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:22 crc kubenswrapper[4813]: E0320 15:39:22.265634 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:22 crc kubenswrapper[4813]: E0320 15:39:22.265743 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.352530 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.352606 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.352625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.352651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.352675 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.454535 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.454580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.454591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.454610 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.454622 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.557265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.557327 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.557344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.557363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.557376 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.660380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.660761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.660922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.661069 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.661211 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.764625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.764683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.764698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.764720 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.764735 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.867672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.867742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.867763 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.867787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.867807 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.969723 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.969767 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.969779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.969824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:22 crc kubenswrapper[4813]: I0320 15:39:22.969839 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:22Z","lastTransitionTime":"2026-03-20T15:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.072464 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.072520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.072536 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.072553 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.072564 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.175558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.175588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.175596 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.175610 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.175618 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.278195 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.278263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.278273 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.278286 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.278305 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.283238 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.381170 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.381212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.381220 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.381242 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.381253 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.483793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.483865 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.483886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.483911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.483928 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.586557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.586621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.586640 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.586666 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.586685 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.689000 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.689074 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.689095 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.689123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.689162 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.791398 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.791434 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.791444 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.791469 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.791498 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.894013 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.894061 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.894077 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.894098 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.894115 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.996682 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.996736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.996746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.996764 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:23 crc kubenswrapper[4813]: I0320 15:39:23.996776 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:23Z","lastTransitionTime":"2026-03-20T15:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.104332 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.104400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.104424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.104453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.104516 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.206192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.206270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.206289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.206315 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.206334 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.265544 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.265616 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.265581 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:24 crc kubenswrapper[4813]: E0320 15:39:24.265715 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:24 crc kubenswrapper[4813]: E0320 15:39:24.265804 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:24 crc kubenswrapper[4813]: E0320 15:39:24.267319 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.308146 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.308189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.308197 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.308211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.308220 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.410553 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.410835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.410954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.411105 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.411224 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.513653 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.514107 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.514208 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.514329 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.514415 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.617820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.617862 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.617876 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.617892 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.617904 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.721197 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.721264 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.721282 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.721306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.721323 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.824612 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.824686 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.824704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.824729 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.824751 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.927998 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.928060 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.928078 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.928102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:24 crc kubenswrapper[4813]: I0320 15:39:24.928121 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:24Z","lastTransitionTime":"2026-03-20T15:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.031069 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.031145 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.031174 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.031206 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.031231 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.133966 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.134289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.134557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.134747 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.134964 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.238598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.238664 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.238678 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.238700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.238716 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.342315 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.342380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.342397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.342425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.342450 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.445250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.445303 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.445316 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.445333 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.445355 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.548008 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.548473 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.548707 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.548730 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.548747 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.652894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.652938 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.652954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.652977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.652995 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.729846 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.756279 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.756339 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.756357 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.756382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.756403 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.859095 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.859126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.859136 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.859152 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.859163 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.962459 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.962599 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.962626 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.962656 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:25 crc kubenswrapper[4813]: I0320 15:39:25.962678 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:25Z","lastTransitionTime":"2026-03-20T15:39:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.065665 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.065774 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.065791 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.065816 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.065833 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.168207 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.168243 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.168253 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.168268 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.168279 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.207646 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.207771 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:39:42.207755376 +0000 UTC m=+111.630458217 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.265243 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.265282 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.265280 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.265588 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.265824 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.265892 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.271857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.271909 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.271932 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.271958 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.271980 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.308354 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.308409 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.308445 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.308515 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308527 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308611 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:26 
crc kubenswrapper[4813]: E0320 15:39:26.308644 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:42.308620916 +0000 UTC m=+111.731323777 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308671 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:42.308654497 +0000 UTC m=+111.731357438 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308702 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308744 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308756 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.308818 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:42.308799431 +0000 UTC m=+111.731502272 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.310063 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.310099 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.310118 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:26 crc kubenswrapper[4813]: E0320 15:39:26.310219 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:42.310190086 +0000 UTC m=+111.732892967 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.374149 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.374196 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.374218 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.374243 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.374259 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.476357 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.476395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.476404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.476417 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.476427 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.578885 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.578943 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.578960 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.578983 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.579000 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.681033 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.681076 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.681087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.681102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.681114 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.734161 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.736960 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.747118 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.763306 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.781994 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.782944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.783003 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.783022 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.783372 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.783430 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.801385 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.817976 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.842934 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.865828 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.880387 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.885095 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.885159 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.885177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.885202 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.885219 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.899806 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.937081 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687
7441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\
\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.961943 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.978887 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.989108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.989164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.989180 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.989201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.989215 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:26Z","lastTransitionTime":"2026-03-20T15:39:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:26 crc kubenswrapper[4813]: I0320 15:39:26.992956 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:26Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.005917 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:27Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.025744 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:27Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.040919 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:27Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.056935 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:27Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.071926 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:27Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.092799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.092835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.092844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.092857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.092867 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.196073 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.196160 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.196181 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.196206 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.196225 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.299392 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.299444 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.299457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.299475 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.299508 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.402762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.402802 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.402814 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.402828 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.402838 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.505370 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.505424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.505437 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.505455 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.505468 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.608727 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.608791 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.608807 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.608832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.608850 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.711755 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.711816 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.711833 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.711858 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.711877 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.813806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.813849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.813863 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.813880 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.813892 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.916270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.916309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.916318 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.916330 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:27 crc kubenswrapper[4813]: I0320 15:39:27.916339 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:27Z","lastTransitionTime":"2026-03-20T15:39:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.020184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.020263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.020275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.020293 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.020306 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.123249 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.123301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.123314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.123332 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.123344 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.225534 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.225881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.226014 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.226153 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.226267 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.265475 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.265558 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:28 crc kubenswrapper[4813]: E0320 15:39:28.265962 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:28 crc kubenswrapper[4813]: E0320 15:39:28.266104 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.266217 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:28 crc kubenswrapper[4813]: E0320 15:39:28.266441 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.328761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.329040 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.329123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.329228 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.329301 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.431531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.432048 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.432119 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.432186 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.432243 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.534636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.534677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.534688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.534703 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.534715 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.637947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.637987 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.638001 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.638021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.638031 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.740817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.740864 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.740881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.740902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.740919 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.743298 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.760644 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.795667 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.819943 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.835095 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.842916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.842964 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.842977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.842997 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.843010 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.850588 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.862736 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.876317 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.895401 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.912384 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:28Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.945930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.946184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.946295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.946404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:28 crc kubenswrapper[4813]: I0320 15:39:28.946514 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:28Z","lastTransitionTime":"2026-03-20T15:39:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.049388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.049835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.049999 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.050151 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.050292 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.153995 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.154272 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.154431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.154629 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.154777 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.258379 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.258742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.258911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.259050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.259199 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.362434 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.362515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.362535 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.362558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.362574 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.464905 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.464968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.464992 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.465021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.465041 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.466511 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.466590 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.466624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.466645 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.466660 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: E0320 15:39:29.487685 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:29Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.493111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.493179 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.493205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.493234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.493256 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: E0320 15:39:29.512156 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:29Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.516279 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.516329 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.516346 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.516369 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.516386 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: E0320 15:39:29.531302 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:29Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.535350 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.535373 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.535381 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.535392 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.535401 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: E0320 15:39:29.549153 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:29Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.552957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.552992 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.553004 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.553018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.553028 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: E0320 15:39:29.565288 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:29Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:29 crc kubenswrapper[4813]: E0320 15:39:29.565435 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.567232 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.567273 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.567288 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.567306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.567321 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.670003 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.670067 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.670092 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.670120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.670144 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.772951 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.773046 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.773066 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.773091 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.773114 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.876820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.876919 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.876943 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.876974 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.876999 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.980835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.980901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.980921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.980947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:29 crc kubenswrapper[4813]: I0320 15:39:29.980966 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:29Z","lastTransitionTime":"2026-03-20T15:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.083824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.083883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.083901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.083924 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.083944 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.185840 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.185896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.185907 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.185923 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.185935 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.265256 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.265373 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:30 crc kubenswrapper[4813]: E0320 15:39:30.265587 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.265702 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:30 crc kubenswrapper[4813]: E0320 15:39:30.265748 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:30 crc kubenswrapper[4813]: E0320 15:39:30.265992 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.289253 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.289285 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.289297 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.289311 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.289321 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.391908 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.391964 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.391981 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.392005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.392022 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.495009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.495063 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.495079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.495102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.495121 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.597762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.597820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.597833 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.597855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.597869 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.700639 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.700690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.700707 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.700732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.700752 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.803541 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.803624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.803646 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.803675 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.803696 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.906679 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.906743 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.906762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.906785 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:30 crc kubenswrapper[4813]: I0320 15:39:30.906805 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:30Z","lastTransitionTime":"2026-03-20T15:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.010055 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.010145 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.010164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.010188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.010207 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.113692 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.113784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.113805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.113831 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.113850 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.217288 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.217345 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.217365 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.217388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.217431 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.287646 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.309915 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.320967 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.321039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.321050 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.321073 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.321133 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.333415 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.356763 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.382941 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.402633 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\
\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.420969 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.425836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.425892 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.425909 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.425934 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.425953 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.437561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.452685 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.528880 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.528950 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.528968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.528993 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.529012 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.631758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.631786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.631794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.631806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.631815 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.735323 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.735416 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.735429 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.735445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.735477 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.838839 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.838900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.838917 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.838940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.838957 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.942623 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.942659 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.942673 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.942691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:31 crc kubenswrapper[4813]: I0320 15:39:31.942702 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:31Z","lastTransitionTime":"2026-03-20T15:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.045451 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.045547 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.045565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.045588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.045604 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.148364 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.148422 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.148443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.148466 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.148515 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.251793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.251886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.251909 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.251967 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.251985 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.265508 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.265635 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.265647 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:32 crc kubenswrapper[4813]: E0320 15:39:32.265834 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:32 crc kubenswrapper[4813]: E0320 15:39:32.266023 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:32 crc kubenswrapper[4813]: E0320 15:39:32.266236 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.355344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.355411 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.355430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.355453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.355471 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.458602 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.458643 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.458655 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.458670 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.458681 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.561029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.561099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.561125 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.561156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.561186 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.664719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.664763 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.664782 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.664805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.664824 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.767367 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.767720 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.767806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.767913 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.767999 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.870881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.871192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.871287 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.871403 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.871525 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.974731 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.974788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.974805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.974832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:32 crc kubenswrapper[4813]: I0320 15:39:32.974849 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:32Z","lastTransitionTime":"2026-03-20T15:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.079178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.079238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.079256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.079280 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.079298 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.085579 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-czvmm"] Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.085976 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.088185 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.088608 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.088696 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.113394 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.130561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.146268 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.160988 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.174109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qbv7\" (UniqueName: \"kubernetes.io/projected/e5dd2ff6-f564-457c-9781-eefecdfea677-kube-api-access-4qbv7\") pod \"node-resolver-czvmm\" (UID: 
\"e5dd2ff6-f564-457c-9781-eefecdfea677\") " pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.174309 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e5dd2ff6-f564-457c-9781-eefecdfea677-hosts-file\") pod \"node-resolver-czvmm\" (UID: \"e5dd2ff6-f564-457c-9781-eefecdfea677\") " pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.182280 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.182410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.182531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.182644 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.182738 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.198027 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.214394 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.231421 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 
2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.247608 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.265888 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.275012 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qbv7\" (UniqueName: \"kubernetes.io/projected/e5dd2ff6-f564-457c-9781-eefecdfea677-kube-api-access-4qbv7\") pod \"node-resolver-czvmm\" (UID: \"e5dd2ff6-f564-457c-9781-eefecdfea677\") " pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.275111 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e5dd2ff6-f564-457c-9781-eefecdfea677-hosts-file\") pod \"node-resolver-czvmm\" (UID: \"e5dd2ff6-f564-457c-9781-eefecdfea677\") " pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.275241 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e5dd2ff6-f564-457c-9781-eefecdfea677-hosts-file\") pod \"node-resolver-czvmm\" (UID: \"e5dd2ff6-f564-457c-9781-eefecdfea677\") " pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.280801 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.285566 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.285611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.285628 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.285650 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.285678 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.302188 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.319467 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qbv7\" (UniqueName: \"kubernetes.io/projected/e5dd2ff6-f564-457c-9781-eefecdfea677-kube-api-access-4qbv7\") pod \"node-resolver-czvmm\" (UID: \"e5dd2ff6-f564-457c-9781-eefecdfea677\") " pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.388116 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.388176 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.388192 4813 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.388212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.388228 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.406807 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-czvmm" Mar 20 15:39:33 crc kubenswrapper[4813]: W0320 15:39:33.418746 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5dd2ff6_f564_457c_9781_eefecdfea677.slice/crio-5bdb7ea103e64489d06cf6b2e91b3a31fadac1e92eb4f47a136d3f22450bb14f WatchSource:0}: Error finding container 5bdb7ea103e64489d06cf6b2e91b3a31fadac1e92eb4f47a136d3f22450bb14f: Status 404 returned error can't find the container with id 5bdb7ea103e64489d06cf6b2e91b3a31fadac1e92eb4f47a136d3f22450bb14f Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.488353 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gpmgw"] Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.488877 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-rsfpf"] Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.489279 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-l8d6t"] Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.489514 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.489654 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.489761 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496090 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496216 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496102 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496113 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496567 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496769 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496802 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496833 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496939 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496940 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.496971 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.497431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.497504 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.497520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.497541 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.497557 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.499268 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.509339 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.525538 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.550806 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.566682 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.577843 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-k8s-cni-cncf-io\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.577883 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-daemon-config\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.577922 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-system-cni-dir\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.577941 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-cnibin\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.577968 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/dbc04883-b38a-4b6a-bee4-f6804c8aad94-rootfs\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578008 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-cni-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-cnibin\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578095 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8979w\" (UniqueName: \"kubernetes.io/projected/dbc04883-b38a-4b6a-bee4-f6804c8aad94-kube-api-access-8979w\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc 
kubenswrapper[4813]: I0320 15:39:33.578115 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-socket-dir-parent\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578129 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-kubelet\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578195 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-conf-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578270 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578295 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-etc-kubernetes\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578343 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-cni-bin\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578370 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-hostroot\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578418 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbc04883-b38a-4b6a-bee4-f6804c8aad94-mcd-auth-proxy-config\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578438 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-netns\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " 
pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578532 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f0207723-5d01-4034-bdcd-5bb28ff71021-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578556 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-cni-binary-copy\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578596 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6rhf\" (UniqueName: \"kubernetes.io/projected/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-kube-api-access-c6rhf\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578622 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-os-release\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578644 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dbc04883-b38a-4b6a-bee4-f6804c8aad94-proxy-tls\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578705 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-system-cni-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578727 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-multus-certs\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578771 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f0207723-5d01-4034-bdcd-5bb28ff71021-cni-binary-copy\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578788 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9mcf\" (UniqueName: 
\"kubernetes.io/projected/f0207723-5d01-4034-bdcd-5bb28ff71021-kube-api-access-t9mcf\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578803 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-os-release\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.578837 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-cni-multus\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.581956 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.594785 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.605802 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.605888 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.605904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.605925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.605939 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.608520 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.622527 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.637010 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.648562 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.660910 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.672453 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.679971 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dbc04883-b38a-4b6a-bee4-f6804c8aad94-proxy-tls\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.680017 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-system-cni-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.680125 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-system-cni-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.680041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f0207723-5d01-4034-bdcd-5bb28ff71021-cni-binary-copy\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: 
\"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.680190 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9mcf\" (UniqueName: \"kubernetes.io/projected/f0207723-5d01-4034-bdcd-5bb28ff71021-kube-api-access-t9mcf\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.680594 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-os-release\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.680903 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f0207723-5d01-4034-bdcd-5bb28ff71021-cni-binary-copy\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.681219 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-os-release\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.682022 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-cni-multus\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.682067 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-cni-multus\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.682105 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-multus-certs\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.682197 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-multus-certs\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.682267 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-k8s-cni-cncf-io\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc 
kubenswrapper[4813]: I0320 15:39:33.682292 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-daemon-config\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.682356 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-k8s-cni-cncf-io\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683494 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-daemon-config\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683560 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-system-cni-dir\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-cnibin\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683685 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/dbc04883-b38a-4b6a-bee4-f6804c8aad94-rootfs\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683708 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-cni-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683727 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-cnibin\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683747 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8979w\" (UniqueName: \"kubernetes.io/projected/dbc04883-b38a-4b6a-bee4-f6804c8aad94-kube-api-access-8979w\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683769 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-socket-dir-parent\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-kubelet\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683809 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-conf-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683833 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683855 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-etc-kubernetes\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683888 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-cni-bin\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-hostroot\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683960 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbc04883-b38a-4b6a-bee4-f6804c8aad94-mcd-auth-proxy-config\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684008 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-netns\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684037 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-cni-binary-copy\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684060 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6rhf\" (UniqueName: \"kubernetes.io/projected/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-kube-api-access-c6rhf\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684109 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f0207723-5d01-4034-bdcd-5bb28ff71021-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684131 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-os-release\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684120 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-socket-dir-parent\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684193 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/dbc04883-b38a-4b6a-bee4-f6804c8aad94-rootfs\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683634 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-system-cni-dir\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684248 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-kubelet\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.683658 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-cnibin\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684288 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-conf-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684375 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-multus-cni-dir\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684468 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-cnibin\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684544 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-etc-kubernetes\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684660 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-hostroot\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684785 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-var-lib-cni-bin\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.684868 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-os-release\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.685258 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-host-run-netns\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.685331 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dbc04883-b38a-4b6a-bee4-f6804c8aad94-proxy-tls\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.685386 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f0207723-5d01-4034-bdcd-5bb28ff71021-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 
15:39:33.685519 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f0207723-5d01-4034-bdcd-5bb28ff71021-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.686099 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbc04883-b38a-4b6a-bee4-f6804c8aad94-mcd-auth-proxy-config\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.687164 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-cni-binary-copy\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.688829 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.706085 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6rhf\" (UniqueName: \"kubernetes.io/projected/a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1-kube-api-access-c6rhf\") pod \"multus-gpmgw\" (UID: \"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\") " pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.706561 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8979w\" (UniqueName: \"kubernetes.io/projected/dbc04883-b38a-4b6a-bee4-f6804c8aad94-kube-api-access-8979w\") pod \"machine-config-daemon-l8d6t\" (UID: \"dbc04883-b38a-4b6a-bee4-f6804c8aad94\") " pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.707577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.707649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.707660 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.707673 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.707681 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.711261 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9mcf\" (UniqueName: \"kubernetes.io/projected/f0207723-5d01-4034-bdcd-5bb28ff71021-kube-api-access-t9mcf\") pod \"multus-additional-cni-plugins-rsfpf\" (UID: \"f0207723-5d01-4034-bdcd-5bb28ff71021\") " pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.715751 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\"
:{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4
f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.733756 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\
\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.746287 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.760973 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-czvmm" event={"ID":"e5dd2ff6-f564-457c-9781-eefecdfea677","Type":"ContainerStarted","Data":"4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.761029 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-czvmm" event={"ID":"e5dd2ff6-f564-457c-9781-eefecdfea677","Type":"ContainerStarted","Data":"5bdb7ea103e64489d06cf6b2e91b3a31fadac1e92eb4f47a136d3f22450bb14f"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.761140 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.764224 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.767142 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.767521 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.776921 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.789989 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.806877 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.810538 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.810573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.810585 4813 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.810603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.810615 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.816209 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.822655 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: W0320 15:39:33.826268 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0207723_5d01_4034_bdcd_5bb28ff71021.slice/crio-b6b2d5325aa81ff89ac6077ca64705b407175a7d2794d0459ef9f541d187254b WatchSource:0}: Error finding container b6b2d5325aa81ff89ac6077ca64705b407175a7d2794d0459ef9f541d187254b: Status 404 returned error can't find the container with id b6b2d5325aa81ff89ac6077ca64705b407175a7d2794d0459ef9f541d187254b Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.830920 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-gpmgw" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.835527 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 
15:39:33.841618 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.859778 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.867667 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dvvsh"] Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.868878 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.872589 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.872684 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.872834 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.872894 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.872951 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.873193 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.873382 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.877315 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886021 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-etc-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886116 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-systemd-units\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886180 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-config\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886213 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-kubelet\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886246 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-var-lib-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886277 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-node-log\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886332 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-netd\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886396 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-env-overrides\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886425 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-bin\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886452 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-slash\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886506 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-ovn\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886547 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tms64\" (UniqueName: \"kubernetes.io/projected/32fae70f-6b1f-4935-9747-8080c9feb514-kube-api-access-tms64\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.886736 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-systemd\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.887125 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-netns\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 
15:39:33.887215 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32fae70f-6b1f-4935-9747-8080c9feb514-ovn-node-metrics-cert\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.887257 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-script-lib\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.892810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.892863 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-ovn-kubernetes\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.892918 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-log-socket\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.892948 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.903442 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.912911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.912957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.912974 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.912997 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.913016 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:33Z","lastTransitionTime":"2026-03-20T15:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.918239 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.933880 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.946572 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.967004 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.982000 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994400 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-kubelet\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994462 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-var-lib-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994470 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-kubelet\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994517 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-node-log\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994533 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-var-lib-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994551 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-netd\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994582 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-env-overrides\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994592 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-netd\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994559 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-node-log\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994611 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-bin\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994661 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-bin\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995619 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-env-overrides\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.994655 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-slash\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995831 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-slash\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995875 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-ovn\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995900 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tms64\" (UniqueName: \"kubernetes.io/projected/32fae70f-6b1f-4935-9747-8080c9feb514-kube-api-access-tms64\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995929 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-netns\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995947 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-systemd\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995964 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32fae70f-6b1f-4935-9747-8080c9feb514-ovn-node-metrics-cert\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995977 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-script-lib\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.995996 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996012 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-ovn-kubernetes\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996038 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-log-socket\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996053 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996068 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-etc-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996097 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-ovn\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996135 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-systemd-units\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996402 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:33Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996541 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-ovn-kubernetes\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 
15:39:33.996521 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-log-socket\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996615 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-netns\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996103 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-systemd-units\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996678 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-config\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.997030 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-etc-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.997089 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.996066 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-systemd\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.997303 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-script-lib\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.997688 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-config\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:33 crc kubenswrapper[4813]: I0320 15:39:33.998096 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-openvswitch\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.000772 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32fae70f-6b1f-4935-9747-8080c9feb514-ovn-node-metrics-cert\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.009920 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tms64\" (UniqueName: \"kubernetes.io/projected/32fae70f-6b1f-4935-9747-8080c9feb514-kube-api-access-tms64\") pod \"ovnkube-node-dvvsh\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.012183 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.016680 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.016707 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.016716 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.016729 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.016738 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.030048 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.041069 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.050291 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.107757 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.118425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.118458 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.118467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.118498 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.118509 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.124981 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.135914 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc 
kubenswrapper[4813]: I0320 15:39:34.193988 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:34 crc kubenswrapper[4813]: W0320 15:39:34.209587 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32fae70f_6b1f_4935_9747_8080c9feb514.slice/crio-c9009a25724d4dc7837fa33d67bf1563fa6dae84a84425f2fc0711faf86f70cf WatchSource:0}: Error finding container c9009a25724d4dc7837fa33d67bf1563fa6dae84a84425f2fc0711faf86f70cf: Status 404 returned error can't find the container with id c9009a25724d4dc7837fa33d67bf1563fa6dae84a84425f2fc0711faf86f70cf Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.221467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.221517 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.221528 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.221543 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.221554 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.265793 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.265849 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:34 crc kubenswrapper[4813]: E0320 15:39:34.265928 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.265803 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:34 crc kubenswrapper[4813]: E0320 15:39:34.266062 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:34 crc kubenswrapper[4813]: E0320 15:39:34.266156 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.323710 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.323772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.323783 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.323800 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.323811 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.426019 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.426062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.426071 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.426086 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.426096 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.528901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.528928 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.528935 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.528947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.528956 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.631298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.631391 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.631420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.631452 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.631476 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.734040 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.734424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.734437 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.734459 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.734473 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.773518 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" exitCode=0 Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.773571 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.773628 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"c9009a25724d4dc7837fa33d67bf1563fa6dae84a84425f2fc0711faf86f70cf"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.779115 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.779167 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.779187 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"ba7e99c9f2a3086de364d3e915fb4b658b862bc5a69d147de66772a3c5366e74"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.782767 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerStarted","Data":"2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.782820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerStarted","Data":"2f7017782c06c5f99c89962930712368a194880ba529c937d0cd5a421221ba2b"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.784974 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0207723-5d01-4034-bdcd-5bb28ff71021" containerID="790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610" exitCode=0 Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.786019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerDied","Data":"790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.786064 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerStarted","Data":"b6b2d5325aa81ff89ac6077ca64705b407175a7d2794d0459ef9f541d187254b"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.793753 4813 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.807645 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.830096 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.836621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.836652 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.836664 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.836678 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.836689 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.847454 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.860865 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.877932 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.897085 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.910561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.935174 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.938464 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.938538 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.938549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.938573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.938586 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:34Z","lastTransitionTime":"2026-03-20T15:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.952185 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.966100 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.978397 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:34 crc kubenswrapper[4813]: I0320 15:39:34.992183 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:34Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.009276 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a2
36617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.021237 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"ho
stIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.041683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.041726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.041740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.041757 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.041770 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.042775 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z 
is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.053561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.064548 4813 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.082342 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.093967 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.105554 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.124794 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.143663 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.144404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.144465 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.144520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.144552 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.144572 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.154938 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.172217 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc 
kubenswrapper[4813]: I0320 15:39:35.186343 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.200833 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.213942 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.247316 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.247396 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.247409 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.247426 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.247438 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.350403 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.350446 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.350456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.350472 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.350523 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.452270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.452307 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.452318 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.452334 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.452345 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.554751 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.554782 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.554791 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.554803 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.554811 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.657575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.657624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.657640 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.657663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.657679 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.761502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.761554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.761565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.761580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.761591 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.801982 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.802039 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.802055 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.802070 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.814454 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerStarted","Data":"88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.835418 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.864045 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.864089 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.864100 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.864117 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.864128 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.864064 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.873546 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.885297 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.903627 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.917596 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.936010 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.964166 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.970028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.970110 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.970123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.970140 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.970152 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:35Z","lastTransitionTime":"2026-03-20T15:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:35 crc kubenswrapper[4813]: I0320 15:39:35.985835 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:35Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.004518 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.022927 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.042133 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.057027 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.073426 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.073520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.073540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.073607 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.073630 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.082617 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.176110 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.176437 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.176445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.176459 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.176468 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.264897 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:36 crc kubenswrapper[4813]: E0320 15:39:36.265096 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.265403 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.265618 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:36 crc kubenswrapper[4813]: E0320 15:39:36.265949 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:36 crc kubenswrapper[4813]: E0320 15:39:36.266057 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.279871 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.279897 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.279905 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.279920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.279929 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.382446 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.382524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.382541 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.382577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.382596 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.486301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.486344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.486355 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.486372 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.486384 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.588358 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.588418 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.588443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.588473 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.588565 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.691179 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.691241 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.691263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.691289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.691310 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.794147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.794217 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.794239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.794267 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.794291 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.823685 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.823767 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.826009 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0207723-5d01-4034-bdcd-5bb28ff71021" containerID="88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a" exitCode=0 Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.826074 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerDied","Data":"88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.857824 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.880130 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.897448 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.897502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.897512 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.897545 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.897554 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.907728 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 
2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.924427 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.938504 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.949217 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.964251 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-
20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.979780 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\
"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.991613 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:36Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.999704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.999760 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.999776 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.999798 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:36 crc kubenswrapper[4813]: I0320 15:39:36.999814 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:36Z","lastTransitionTime":"2026-03-20T15:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.007537 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.021331 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.039450 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a2
36617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.050966 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.061749 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03
-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.103000 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.103056 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.103072 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.103097 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.103112 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.206419 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.206456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.206467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.206498 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.206510 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.309038 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.309427 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.309442 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.309461 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.309472 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.412068 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.412120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.412137 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.412156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.412171 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.515245 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.515357 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.515375 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.515438 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.515456 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.620279 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.620343 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.620361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.620387 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.620405 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.723321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.723364 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.723376 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.723391 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.723413 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.826244 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.826318 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.826346 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.826378 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.826402 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.831397 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0207723-5d01-4034-bdcd-5bb28ff71021" containerID="2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3" exitCode=0 Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.831464 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerDied","Data":"2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.857092 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.873055 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.906932 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.929654 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.929708 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.929732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.929759 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.929783 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:37Z","lastTransitionTime":"2026-03-20T15:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.933075 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.952776 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.972598 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:37 crc kubenswrapper[4813]: I0320 15:39:37.987814 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"
/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:37Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.002923 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.017521 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.029509 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.031986 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.032142 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.032238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.032351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.032474 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.050545 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z 
is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.061408 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.071765 4813 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.084762 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.135414 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.135453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.135461 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.135477 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.135500 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.238644 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.238685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.238696 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.238713 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.238776 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.264780 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.264818 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.264827 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:38 crc kubenswrapper[4813]: E0320 15:39:38.264924 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:38 crc kubenswrapper[4813]: E0320 15:39:38.265140 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:38 crc kubenswrapper[4813]: E0320 15:39:38.265745 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.343829 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.343912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.343937 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.343966 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.343989 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.448524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.448570 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.448582 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.448599 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.448610 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.551334 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.551376 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.551391 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.551407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.551420 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.654153 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.654237 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.654254 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.654278 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.654296 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.756833 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.756901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.756916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.756938 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.756955 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.838154 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0207723-5d01-4034-bdcd-5bb28ff71021" containerID="5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77" exitCode=0 Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.838251 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerDied","Data":"5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.845644 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.861172 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.861237 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.861259 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.861293 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.861315 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.869112 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.883842 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.897845 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.913610 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.929760 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.943702 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.960799 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.964056 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.964125 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.964147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.964176 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.964196 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:38Z","lastTransitionTime":"2026-03-20T15:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.975655 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:38 crc kubenswrapper[4813]: I0320 15:39:38.986752 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.001201 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:38Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.010473 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.028866 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a2
36617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.040812 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.051572 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03
-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.067473 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.067531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.067544 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.067560 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.067572 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.170323 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.170384 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.170401 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.170428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.170446 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.273935 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.273989 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.274003 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.274021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.274038 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.376293 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.376326 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.376333 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.376346 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.376354 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.483120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.483162 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.483177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.483196 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.483210 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.587111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.587160 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.587174 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.587198 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.587214 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.690084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.690160 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.690183 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.690212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.690240 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.784870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.784928 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.784946 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.784970 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.784986 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: E0320 15:39:39.808621 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.814709 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.814770 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.814793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.814827 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.814848 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: E0320 15:39:39.851022 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.856328 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.856380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.856393 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.856413 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.856427 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.856602 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerStarted","Data":"0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.881476 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: E0320 15:39:39.885659 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.890420 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-vx6nr"] Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.890815 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.891627 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.891673 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.891687 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.891708 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.891725 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.892450 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.892608 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.892707 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.893015 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.897351 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: E0320 15:39:39.903272 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.905851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.905885 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.905896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.905912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.905924 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.912910 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: E0320 15:39:39.919226 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: E0320 15:39:39.919379 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.920931 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.920963 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.920975 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.920991 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.921003 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:39Z","lastTransitionTime":"2026-03-20T15:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.926639 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.937722 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"na
me\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.946474 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.956843 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.959356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vg7s\" (UniqueName: \"kubernetes.io/projected/e8f0d437-97b9-4f08-8136-4a13fe545b08-kube-api-access-7vg7s\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.959649 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8f0d437-97b9-4f08-8136-4a13fe545b08-host\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.959707 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e8f0d437-97b9-4f08-8136-4a13fe545b08-serviceca\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.965760 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.984065 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:39 crc kubenswrapper[4813]: I0320 15:39:39.999039 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:39Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.009949 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.023325 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.023414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.023430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.023451 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.023466 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.035628 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.050109 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.060640 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8f0d437-97b9-4f08-8136-4a13fe545b08-host\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.060693 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e8f0d437-97b9-4f08-8136-4a13fe545b08-serviceca\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.060786 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vg7s\" (UniqueName: \"kubernetes.io/projected/e8f0d437-97b9-4f08-8136-4a13fe545b08-kube-api-access-7vg7s\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.061120 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e8f0d437-97b9-4f08-8136-4a13fe545b08-host\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.062459 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e8f0d437-97b9-4f08-8136-4a13fe545b08-serviceca\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.062695 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.083263 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.084779 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vg7s\" (UniqueName: \"kubernetes.io/projected/e8f0d437-97b9-4f08-8136-4a13fe545b08-kube-api-access-7vg7s\") pod \"node-ca-vx6nr\" (UID: \"e8f0d437-97b9-4f08-8136-4a13fe545b08\") " pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.099394 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.111521 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.120903 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.125697 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.125734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.125745 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.125760 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.125772 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.134151 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.147679 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.163383 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.181338 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.197781 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"na
me\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.202829 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vx6nr" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.214200 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: 
I0320 15:39:40.228383 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.228431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.228449 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.228474 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.228551 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.234811 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.253570 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.265515 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.265583 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:40 crc kubenswrapper[4813]: E0320 15:39:40.265629 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.265683 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:40 crc kubenswrapper[4813]: E0320 15:39:40.265748 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:40 crc kubenswrapper[4813]: E0320 15:39:40.265849 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.279680 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"contai
nerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.295287 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.308614 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.331257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.331295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.331305 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.331320 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.331332 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.434201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.434236 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.434248 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.434264 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.434275 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.537648 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.537700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.537717 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.537740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.537759 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.641334 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.641417 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.641443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.641467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.641514 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.744622 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.744683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.744701 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.744724 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.744743 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.848784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.848847 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.848868 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.848900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.848922 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.864687 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vx6nr" event={"ID":"e8f0d437-97b9-4f08-8136-4a13fe545b08","Type":"ContainerStarted","Data":"de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.864785 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vx6nr" event={"ID":"e8f0d437-97b9-4f08-8136-4a13fe545b08","Type":"ContainerStarted","Data":"1a05c62a1d31d4c9d4179641c27f02452ff91cba187fb5da6d60b2a4ff796eca"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.871831 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0207723-5d01-4034-bdcd-5bb28ff71021" containerID="0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688" exitCode=0 Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.871890 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerDied","Data":"0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.886154 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.907585 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.922133 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.955314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.955672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.956018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.956418 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.956782 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:40Z","lastTransitionTime":"2026-03-20T15:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.964035 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.986056 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:40 crc kubenswrapper[4813]: I0320 15:39:40.998827 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:40Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.022817 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009227
2e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.038754 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.051598 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.059149 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.059177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.059185 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.059198 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.059207 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.062561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.074886 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.086385 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.098557 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.111785 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.124242 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"na
me\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.140209 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.158678 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.161268 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.161321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.161344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.161368 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.161385 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.172743 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.188565 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.211145 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.225025 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.237115 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.252750 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.264579 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.264618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.264631 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.264672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.264687 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.265669 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.284179 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.295209 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.304214 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.321845 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.331663 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.345552 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.359323 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.367213 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.367264 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.367283 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.367312 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.367330 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.371166 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.389163 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.405794 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.422854 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.451244 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z 
is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.469384 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.470791 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.470841 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.470859 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.470883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.470902 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.489625 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.506049 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.525051 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.546345 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.571726 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.573895 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.573949 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.573968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.573991 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.574009 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.612249 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.637568 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.656566 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.676953 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.677006 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.677024 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.677048 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.677066 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.779781 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.779846 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.779863 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.779886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.779907 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.881977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.882065 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.882085 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.882109 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.882127 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.884152 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.884591 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.884662 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.884692 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.890954 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0207723-5d01-4034-bdcd-5bb28ff71021" containerID="77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9" exitCode=0 Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.891012 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerDied","Data":"77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.906823 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\
\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.930187 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.932456 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.934773 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.953173 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.972732 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.985959 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.986046 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.986066 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.986089 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.986106 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:41Z","lastTransitionTime":"2026-03-20T15:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:41 crc kubenswrapper[4813]: I0320 15:39:41.993113 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9m
cf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.006181 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.020981 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.033459 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.055057 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.068035 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.079126 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.088056 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.088099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.088111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.088128 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.088139 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.097824 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.113046 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.124426 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.134984 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.146711 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.155943 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.174716 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.186546 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.190242 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.190280 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.190292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.190309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.190324 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.196501 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.210962 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.224532 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.239654 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 
2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.253325 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.265528 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.265561 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.265541 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.265685 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.265776 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.265847 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.266529 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.280104 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.280273 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:40:14.280252048 +0000 UTC m=+143.702954889 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.291565 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7
a9c8bdccc6a2d8dfec621ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.294242 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.294274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.294282 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.294295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.294304 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.302790 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.313886 4813 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.326712 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.335501 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.381240 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.381294 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.381317 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.381338 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381477 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381465 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381534 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381563 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381579 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381605 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:14.38158105 +0000 UTC m=+143.804283921 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381517 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381629 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:14.381615641 +0000 UTC m=+143.804318542 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381638 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381689 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:14.381673833 +0000 UTC m=+143.804376734 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381712 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: E0320 15:39:42.381871 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:14.381845497 +0000 UTC m=+143.804548388 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.396289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.396339 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.396358 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.396379 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.396393 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.498999 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.499041 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.499055 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.499071 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.499083 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.601515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.601592 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.601615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.601646 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.601669 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.704601 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.704668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.704690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.704719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.704740 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.807558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.807613 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.807633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.807661 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.807682 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.912534 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.912597 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.912619 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.912649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.912674 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:42Z","lastTransitionTime":"2026-03-20T15:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.914780 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" event={"ID":"f0207723-5d01-4034-bdcd-5bb28ff71021","Type":"ContainerStarted","Data":"a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824"} Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.949703 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a
2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.979765 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"container
ID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:42 crc kubenswrapper[4813]: I0320 15:39:42.998208 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:42Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.012472 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.016238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.016290 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.016306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.016329 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.016346 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.031290 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.044524 4813 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.058224 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.072458 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.083395 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.095778 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.109030 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.119389 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.119564 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.119594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.119683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.119767 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.122870 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.152704 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.166181 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.176982 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:43Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.222348 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.222397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.222409 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.222424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.222433 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.324827 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.324877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.324887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.324904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.324916 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.427737 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.427792 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.427809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.427832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.427847 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.530979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.531041 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.531058 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.531082 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.531103 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.634926 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.634992 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.635009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.635031 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.635053 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.737327 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.737356 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.737366 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.737380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.737389 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.840397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.840444 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.840456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.840513 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.840525 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.943219 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.943303 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.943327 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.943351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:43 crc kubenswrapper[4813]: I0320 15:39:43.943368 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:43Z","lastTransitionTime":"2026-03-20T15:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.046588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.046673 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.046691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.046716 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.046734 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.149743 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.149817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.149836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.149866 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.149887 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.252259 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.252298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.252308 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.252324 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.252338 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.266640 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.266757 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:44 crc kubenswrapper[4813]: E0320 15:39:44.266835 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.266752 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:44 crc kubenswrapper[4813]: E0320 15:39:44.266995 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:44 crc kubenswrapper[4813]: E0320 15:39:44.267156 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.355122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.355168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.355180 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.355198 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.355211 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.458616 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.458695 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.458719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.458752 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.458777 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.561566 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.561633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.561651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.561678 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.561695 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.664672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.664744 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.664764 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.664790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.664809 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.768636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.768700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.768717 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.768743 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.768762 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.871289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.871365 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.871393 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.871420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.871443 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.974628 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.974817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.974832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.974852 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:44 crc kubenswrapper[4813]: I0320 15:39:44.974864 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:44Z","lastTransitionTime":"2026-03-20T15:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.078422 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.078598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.078624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.078667 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.078691 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.181836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.181909 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.181919 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.181934 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.181943 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.283587 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.283615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.283623 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.283634 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.283642 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.386319 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.386476 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.386558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.386584 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.386715 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.489382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.489436 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.489455 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.489564 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.489580 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.592972 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.593057 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.593081 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.593111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.593133 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.699853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.699907 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.699928 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.699948 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.699962 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.805256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.805698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.805852 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.806012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.806153 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.910683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.910769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.910793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.910828 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.910861 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:45Z","lastTransitionTime":"2026-03-20T15:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.930009 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/0.log" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.934736 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9" exitCode=1 Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.934811 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9"} Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.936175 4813 scope.go:117] "RemoveContainer" containerID="353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.964357 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-c
ert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:45Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.991338 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2"] Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.992013 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:45Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:45 crc kubenswrapper[4813]: I0320 15:39:45.995743 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:45.999186 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:45.999415 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.031354 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.031687 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.031700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.031717 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.031730 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.034154 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.059900 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.076572 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.085284 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.094900 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.102269 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.122101 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:39:44.598769 6633 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 15:39:44.598781 6633 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:44.597346 6633 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 15:39:44.599809 6633 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:39:44.599876 6633 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:39:44.599898 6633 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:39:44.599951 6633 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:39:44.599973 6633 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:39:44.600000 6633 factory.go:656] Stopping watch factory\\\\nI0320 15:39:44.600003 6633 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:39:44.600030 6633 ovnkube.go:599] Stopped ovnkube\\\\nI0320 15:39:44.600029 6633 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:39:44.600051 6633 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:44.600058 6633 handler.go:208] Removed *v1.Namespace event handler 
5\\\\nI03\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.127403 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.127504 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf2jn\" (UniqueName: \"kubernetes.io/projected/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-kube-api-access-tf2jn\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.127553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.127610 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.134391 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.134438 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.134454 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.134479 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.134521 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.142112 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.154344 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.175909 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.190676 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.205539 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.215163 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.230085 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.230132 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf2jn\" (UniqueName: \"kubernetes.io/projected/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-kube-api-access-tf2jn\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.230156 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.230182 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.231133 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.231216 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.231920 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.235699 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.238293 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.238350 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.238372 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.238400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.238421 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.249921 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.253299 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf2jn\" (UniqueName: 
\"kubernetes.io/projected/9c5cc732-9b60-47cf-93f1-346fa99b4ac4-kube-api-access-tf2jn\") pod \"ovnkube-control-plane-749d76644c-jmrs2\" (UID: \"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.262921 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.265066 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.265192 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.265262 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:46 crc kubenswrapper[4813]: E0320 15:39:46.265319 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:46 crc kubenswrapper[4813]: E0320 15:39:46.265394 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:46 crc kubenswrapper[4813]: E0320 15:39:46.265440 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.279306 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.296125 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.306879 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.317910 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.328598 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.340558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.340600 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.340614 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.340632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.340644 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.340850 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.343026 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" Mar 20 15:39:46 crc kubenswrapper[4813]: W0320 15:39:46.356567 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c5cc732_9b60_47cf_93f1_346fa99b4ac4.slice/crio-07eb9b537ceb7021596902425b515e146a0c318a3cb49bb949d818745f1cac86 WatchSource:0}: Error finding container 07eb9b537ceb7021596902425b515e146a0c318a3cb49bb949d818745f1cac86: Status 404 returned error can't find the container with id 07eb9b537ceb7021596902425b515e146a0c318a3cb49bb949d818745f1cac86 Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.367020 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7
a9c8bdccc6a2d8dfec621ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:39:44.598769 6633 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 15:39:44.598781 6633 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:44.597346 6633 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 15:39:44.599809 6633 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:39:44.599876 6633 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:39:44.599898 6633 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:39:44.599951 6633 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:39:44.599973 6633 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:39:44.600000 6633 factory.go:656] Stopping watch factory\\\\nI0320 15:39:44.600003 6633 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:39:44.600030 6633 ovnkube.go:599] Stopped ovnkube\\\\nI0320 15:39:44.600029 6633 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:39:44.600051 6633 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:44.600058 6633 handler.go:208] Removed *v1.Namespace event handler 
5\\\\nI03\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.392348 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.416090 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.447113 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.447395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.447792 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.447915 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.448001 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.454999 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd
/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.477389 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"container
ID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.491960 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.511494 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.550188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.550223 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.550234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.550248 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.550258 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.653233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.653374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.653446 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.653555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.653639 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.743307 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-lc5px"] Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.743762 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:46 crc kubenswrapper[4813]: E0320 15:39:46.743823 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.755878 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.755932 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.755954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.755973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.755984 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.767301 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.782863 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.799022 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.811281 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.826024 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.835871 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.836161 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xjq9\" (UniqueName: \"kubernetes.io/projected/5e5f925a-75e4-485f-9d5e-2be4c2c13616-kube-api-access-9xjq9\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.845847 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.857767 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.857811 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.857823 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.857840 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.857853 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.860812 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.874643 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.901440 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.915150 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cn
i/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.924995 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426
f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.936944 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.937004 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xjq9\" (UniqueName: \"kubernetes.io/projected/5e5f925a-75e4-485f-9d5e-2be4c2c13616-kube-api-access-9xjq9\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:46 crc kubenswrapper[4813]: E0320 15:39:46.937101 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:46 crc kubenswrapper[4813]: E0320 15:39:46.937165 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:47.437147771 +0000 UTC m=+116.859850612 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.937811 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.939650 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/0.log" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.943475 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.944018 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.945296 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" event={"ID":"9c5cc732-9b60-47cf-93f1-346fa99b4ac4","Type":"ContainerStarted","Data":"332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.945333 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" event={"ID":"9c5cc732-9b60-47cf-93f1-346fa99b4ac4","Type":"ContainerStarted","Data":"95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.945349 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" event={"ID":"9c5cc732-9b60-47cf-93f1-346fa99b4ac4","Type":"ContainerStarted","Data":"07eb9b537ceb7021596902425b515e146a0c318a3cb49bb949d818745f1cac86"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.951355 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 
2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.953390 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xjq9\" (UniqueName: \"kubernetes.io/projected/5e5f925a-75e4-485f-9d5e-2be4c2c13616-kube-api-access-9xjq9\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.960168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.960295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.960352 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.960414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.960836 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:46Z","lastTransitionTime":"2026-03-20T15:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.969541 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7
a9c8bdccc6a2d8dfec621ec9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:39:44.598769 6633 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 15:39:44.598781 6633 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:44.597346 6633 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 15:39:44.599809 6633 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:39:44.599876 6633 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:39:44.599898 6633 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:39:44.599951 6633 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:39:44.599973 6633 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:39:44.600000 6633 factory.go:656] Stopping watch factory\\\\nI0320 15:39:44.600003 6633 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:39:44.600030 6633 ovnkube.go:599] Stopped ovnkube\\\\nI0320 15:39:44.600029 6633 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:39:44.600051 6633 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:44.600058 6633 handler.go:208] Removed *v1.Namespace event handler 
5\\\\nI03\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.980788 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:46 crc kubenswrapper[4813]: I0320 15:39:46.995160 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.006012 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.019405 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.029938 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.049357 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:39:44.598769 6633 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 15:39:44.598781 6633 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:44.597346 6633 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 15:39:44.599809 6633 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:39:44.599876 6633 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:39:44.599898 6633 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:39:44.599951 6633 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:39:44.599973 6633 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:39:44.600000 6633 factory.go:656] Stopping watch factory\\\\nI0320 15:39:44.600003 6633 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:39:44.600030 6633 ovnkube.go:599] Stopped ovnkube\\\\nI0320 15:39:44.600029 6633 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:39:44.600051 6633 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:44.600058 6633 handler.go:208] Removed *v1.Namespace event handler 
5\\\\nI03\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.061120 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 
15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.066362 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.066400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.066410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.066426 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.066440 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.077538 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08a
af09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.089150 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.102355 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 
15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.121013 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.134129 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.145779 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.164124 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.168844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.168883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.168894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.168912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.168923 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.174230 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.186475 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.201130 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.213772 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.223980 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.236398 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.271343 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.271388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.271400 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.271417 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.271430 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.374741 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.375050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.375201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.375364 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.375587 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.443986 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:47 crc kubenswrapper[4813]: E0320 15:39:47.444223 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:47 crc kubenswrapper[4813]: E0320 15:39:47.444892 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:48.444849657 +0000 UTC m=+117.867552548 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.479590 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.479742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.480234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.480306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.480645 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.584257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.584336 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.584357 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.584386 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.584405 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.689235 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.689311 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.689333 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.689361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.689382 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.793219 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.793270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.793283 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.793303 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.793316 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.896348 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.896390 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.896424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.896445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.896459 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.951026 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/1.log" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.951587 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/0.log" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.954681 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028" exitCode=1 Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.954766 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028"} Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.955045 4813 scope.go:117] "RemoveContainer" containerID="353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.955317 4813 scope.go:117] "RemoveContainer" containerID="db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028" Mar 20 15:39:47 crc kubenswrapper[4813]: E0320 15:39:47.955566 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.968781 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.982292 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.996334 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:47Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.999656 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.999722 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.999740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.999764 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:47 crc kubenswrapper[4813]: I0320 15:39:47.999782 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:47Z","lastTransitionTime":"2026-03-20T15:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.025605 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://353eee6e6749cf17c3bfb4b6dd2e7fe560498dd7a9c8bdccc6a2d8dfec621ec9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:39:44.598769 6633 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 15:39:44.598781 6633 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:44.597346 6633 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 15:39:44.599809 6633 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:39:44.599876 6633 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:39:44.599898 6633 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:39:44.599951 6633 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:39:44.599973 6633 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:39:44.600000 6633 factory.go:656] Stopping watch factory\\\\nI0320 15:39:44.600003 6633 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:39:44.600030 6633 ovnkube.go:599] Stopped ovnkube\\\\nI0320 15:39:44.600029 6633 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:39:44.600051 6633 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:44.600058 6633 handler.go:208] Removed *v1.Namespace event handler 
5\\\\nI03\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.041891 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.058750 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.075937 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.101648 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.101693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.101703 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.101717 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.101726 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.106077 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.122878 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.138731 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.151771 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.165084 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.186059 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.202132 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.204638 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.204712 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.204734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.204763 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.204783 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.218195 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.235559 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.249766 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is 
after 2025-08-24T17:21:41Z" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.265798 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.265945 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.265804 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.266037 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.265798 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.266110 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.266322 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.266570 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.307624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.307845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.307949 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.308050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.308162 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.410784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.410829 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.410840 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.410858 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.410870 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.455096 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.455671 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.455924 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:50.455897548 +0000 UTC m=+119.878600429 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.513520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.513572 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.513589 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.513613 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.513630 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.616421 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.616885 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.616930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.617009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.617032 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.719410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.719436 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.719445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.719457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.719465 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.821929 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.822223 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.822321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.822457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.822606 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.925434 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.925761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.925872 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.925967 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.926050 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:48Z","lastTransitionTime":"2026-03-20T15:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.961101 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/1.log" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.965774 4813 scope.go:117] "RemoveContainer" containerID="db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028" Mar 20 15:39:48 crc kubenswrapper[4813]: E0320 15:39:48.966015 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:39:48 crc kubenswrapper[4813]: I0320 15:39:48.983142 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.002382 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:48Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.022899 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.028134 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.028176 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.028188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.028205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.028218 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.037096 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.049879 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.074291 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.090653 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.107573 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.122981 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.131342 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.131387 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.131404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.131428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.131447 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.136589 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.146014 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.158720 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.177614 
4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.188077 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.205150 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.217760 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.230012 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.233736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.233779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.233790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.233810 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.233822 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.336984 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.337238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.337248 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.337261 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.337294 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.440428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.440474 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.440489 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.440521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.440533 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.543008 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.543052 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.543063 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.543079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.543091 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.646611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.646656 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.646667 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.646683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.646695 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.749674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.749746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.749768 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.749827 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.749850 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.853264 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.853340 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.853366 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.853394 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.853417 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.936403 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.936456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.936470 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.936486 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.936525 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: E0320 15:39:49.957940 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.963087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.963124 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.963133 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.963148 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.963161 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:49 crc kubenswrapper[4813]: E0320 15:39:49.983159 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.988106 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.988157 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.988180 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.988208 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:49 crc kubenswrapper[4813]: I0320 15:39:49.988231 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:49Z","lastTransitionTime":"2026-03-20T15:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.001800 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:49Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.004807 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.004829 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.004839 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.004854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.004865 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.019867 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:50Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.022706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.022726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.022734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.022746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.022754 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.036862 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:50Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.036990 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.038575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.038596 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.038609 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.038622 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.038634 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.141351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.141398 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.141413 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.141438 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.141454 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.244235 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.244269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.244280 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.244295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.244306 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.346725 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.346761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.346772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.346788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.346798 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.448837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.448877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.448887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.448903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.448913 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.475822 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.475970 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.476016 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:39:54.476000967 +0000 UTC m=+123.898703818 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.551864 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.551906 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.551920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.551939 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.551955 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.641990 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.642119 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.642322 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.642393 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.642443 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.642550 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.642724 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:50 crc kubenswrapper[4813]: E0320 15:39:50.642884 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.655079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.655108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.655123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.655143 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.655156 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.758021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.758064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.758075 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.758091 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.758103 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.860685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.860747 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.860766 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.860786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.860799 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.963256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.963314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.963329 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.963350 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:50 crc kubenswrapper[4813]: I0320 15:39:50.963366 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:50Z","lastTransitionTime":"2026-03-20T15:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.065595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.065630 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.065638 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.065651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.065659 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:51Z","lastTransitionTime":"2026-03-20T15:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.168342 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.168387 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.168401 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.168418 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.168427 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:39:51Z","lastTransitionTime":"2026-03-20T15:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:39:51 crc kubenswrapper[4813]: E0320 15:39:51.268819 4813 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.281753 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.295411 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.310038 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.320516 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f429
28e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.331733 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 
15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.342978 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.351266 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.362634 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: E0320 15:39:51.372800 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.377440 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.387706 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.400958 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.413825 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.423509 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.432072 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.441597 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.449885 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.464056 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.473233 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.484903 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 
15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.499288 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.514734 4813 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.525671 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.557060 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.572862 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.589790 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.621904 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.645752 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.660210 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.671048 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.685322 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.699721 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc 
kubenswrapper[4813]: I0320 15:39:51.717002 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.733743 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.751377 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:51 crc kubenswrapper[4813]: I0320 15:39:51.769434 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:39:52 crc kubenswrapper[4813]: I0320 15:39:52.265080 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:52 crc kubenswrapper[4813]: I0320 15:39:52.265154 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:52 crc kubenswrapper[4813]: I0320 15:39:52.265289 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:52 crc kubenswrapper[4813]: E0320 15:39:52.265282 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:52 crc kubenswrapper[4813]: E0320 15:39:52.265474 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:52 crc kubenswrapper[4813]: E0320 15:39:52.265725 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:52 crc kubenswrapper[4813]: I0320 15:39:52.265834 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:52 crc kubenswrapper[4813]: E0320 15:39:52.266060 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:54 crc kubenswrapper[4813]: I0320 15:39:54.265340 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:54 crc kubenswrapper[4813]: I0320 15:39:54.265410 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:54 crc kubenswrapper[4813]: I0320 15:39:54.265389 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:54 crc kubenswrapper[4813]: I0320 15:39:54.265340 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:54 crc kubenswrapper[4813]: E0320 15:39:54.265620 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:54 crc kubenswrapper[4813]: E0320 15:39:54.265817 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:54 crc kubenswrapper[4813]: E0320 15:39:54.265994 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:54 crc kubenswrapper[4813]: E0320 15:39:54.266157 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:54 crc kubenswrapper[4813]: I0320 15:39:54.519564 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:54 crc kubenswrapper[4813]: E0320 15:39:54.519838 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:54 crc kubenswrapper[4813]: E0320 15:39:54.519957 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:02.519929803 +0000 UTC m=+131.942632714 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:39:56 crc kubenswrapper[4813]: I0320 15:39:56.265641 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:56 crc kubenswrapper[4813]: I0320 15:39:56.265649 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:56 crc kubenswrapper[4813]: E0320 15:39:56.266094 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:56 crc kubenswrapper[4813]: E0320 15:39:56.265815 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:56 crc kubenswrapper[4813]: I0320 15:39:56.266707 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:56 crc kubenswrapper[4813]: E0320 15:39:56.266838 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:56 crc kubenswrapper[4813]: I0320 15:39:56.267309 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:56 crc kubenswrapper[4813]: E0320 15:39:56.267456 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:56 crc kubenswrapper[4813]: E0320 15:39:56.374303 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:39:58 crc kubenswrapper[4813]: I0320 15:39:58.265181 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:39:58 crc kubenswrapper[4813]: E0320 15:39:58.265613 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:39:58 crc kubenswrapper[4813]: I0320 15:39:58.265183 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:39:58 crc kubenswrapper[4813]: I0320 15:39:58.265185 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:39:58 crc kubenswrapper[4813]: E0320 15:39:58.265698 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:39:58 crc kubenswrapper[4813]: I0320 15:39:58.265738 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:39:58 crc kubenswrapper[4813]: E0320 15:39:58.265866 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:39:58 crc kubenswrapper[4813]: E0320 15:39:58.265979 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:39:59 crc kubenswrapper[4813]: I0320 15:39:59.278605 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.227062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.227126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.227148 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.227175 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.227196 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:00Z","lastTransitionTime":"2026-03-20T15:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.250916 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:00Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.263726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.263870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.263890 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.263956 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.263975 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:00Z","lastTransitionTime":"2026-03-20T15:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.264960 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.265041 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.265123 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.265166 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.265313 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.265512 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.265699 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.265852 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.287001 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:00Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.292980 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.293115 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.293211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.293301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.293395 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:00Z","lastTransitionTime":"2026-03-20T15:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.313101 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:00Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.318468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.318637 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.318743 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.318835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.318916 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:00Z","lastTransitionTime":"2026-03-20T15:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.336974 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:00Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.343070 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.343118 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.343135 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.343159 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:00 crc kubenswrapper[4813]: I0320 15:40:00.343180 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:00Z","lastTransitionTime":"2026-03-20T15:40:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.367406 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:00Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:00 crc kubenswrapper[4813]: E0320 15:40:00.367667 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.267357 4813 scope.go:117] "RemoveContainer" 
containerID="db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.284741 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.302345 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.321716 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.338976 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.357587 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: E0320 15:40:01.375173 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.375312 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.411135 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.429914 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.446417 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.471927 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.489813 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cn
i/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.501938 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.517025 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.530938 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.540870 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.557742 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.569743 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:01 crc kubenswrapper[4813]: I0320 15:40:01.581762 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:01Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.010823 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/1.log" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.013757 4813 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d"} Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.014687 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.036220 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.057380 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.073817 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.087845 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.105688 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.124026 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.144216 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.158962 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.178235 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.193052 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.207945 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.233230 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.245131 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.262081 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.265082 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.265124 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.265103 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.265100 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:02 crc kubenswrapper[4813]: E0320 15:40:02.265261 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:02 crc kubenswrapper[4813]: E0320 15:40:02.265380 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:02 crc kubenswrapper[4813]: E0320 15:40:02.265527 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:02 crc kubenswrapper[4813]: E0320 15:40:02.265679 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.274608 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20994
82919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.287668 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7db
f86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.299593 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.314030 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:02Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:02 crc kubenswrapper[4813]: I0320 15:40:02.616440 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:02 crc kubenswrapper[4813]: E0320 15:40:02.616723 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:40:02 crc kubenswrapper[4813]: E0320 15:40:02.616809 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:18.616785324 +0000 UTC m=+148.039488205 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.019814 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/2.log" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.020875 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/1.log" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.024960 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d" exitCode=1 Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.025016 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d"} Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.025064 4813 scope.go:117] "RemoveContainer" containerID="db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.026406 4813 scope.go:117] "RemoveContainer" containerID="f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d" Mar 20 15:40:03 crc kubenswrapper[4813]: E0320 15:40:03.026919 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.047162 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.066383 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.082011 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.114689 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://db8697700bd33e7d9e845a36c01a44a24fa7522a655a0c7d5cfbbf6d37d15028\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"bhook \\\\\\\"pod.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/pod?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:39:46Z is after 2025-08-24T17:21:41Z\\\\nI0320 15:39:46.931143 6814 obj_retry.go:409] Going to retry *v1.Pod resource setup for 1 objects: [openshift-multus/network-metrics-daemon-lc5px]\\\\nI0320 15:39:46.931152 6814 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0320 15:39:46.931151 6814 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:39:46.931166 6814 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0320 15:39:46.931166 6814 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 15:39:46.931185 6814 obj_retry.go:285] Attempting retry of *v1.Pod openshift-multus/network-metrics-daemon-lc5px before timer (time: 2026-03-20 15:39:47.908471187 +0000 UTC m=+1.647901632): skip\\\\nI0320 15:39:46.931204 6814 obj_retry.go:420] Function iterateRetryResources for *v1.Pod ended (in 62.112µs)\\\\nI0320 15:39:46.931214 6814 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0320 15:39:46.931244 6814 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed 
*v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.132222 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 
15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.152204 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.172661 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.189525 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.232809 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.256588 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.275535 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.294225 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.308397 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.323684 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.342055 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.362698 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.381466 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:03 crc kubenswrapper[4813]: I0320 15:40:03.398247 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:03Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.030586 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/2.log" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.035341 4813 scope.go:117] "RemoveContainer" containerID="f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d" Mar 20 15:40:04 crc kubenswrapper[4813]: E0320 15:40:04.035539 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.055651 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.076148 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 
2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.094007 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.129656 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.154745 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.174554 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.192805 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.215653 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.265902 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.265934 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:04 crc kubenswrapper[4813]: E0320 15:40:04.266544 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.266086 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:04 crc kubenswrapper[4813]: E0320 15:40:04.266741 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:04 crc kubenswrapper[4813]: E0320 15:40:04.266267 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.266024 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:04 crc kubenswrapper[4813]: E0320 15:40:04.266903 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.282098 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.295003 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.315287 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.333609 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.353108 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.367297 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.383451 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.398293 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.420965 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:04 crc kubenswrapper[4813]: I0320 15:40:04.435166 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:04Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:06 crc kubenswrapper[4813]: I0320 15:40:06.265828 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:06 crc kubenswrapper[4813]: E0320 15:40:06.266352 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:06 crc kubenswrapper[4813]: I0320 15:40:06.265979 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:06 crc kubenswrapper[4813]: I0320 15:40:06.265841 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:06 crc kubenswrapper[4813]: E0320 15:40:06.266443 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:06 crc kubenswrapper[4813]: I0320 15:40:06.265980 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:06 crc kubenswrapper[4813]: E0320 15:40:06.266551 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:06 crc kubenswrapper[4813]: E0320 15:40:06.266630 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:06 crc kubenswrapper[4813]: E0320 15:40:06.377336 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:40:08 crc kubenswrapper[4813]: I0320 15:40:08.265649 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:08 crc kubenswrapper[4813]: E0320 15:40:08.266661 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:08 crc kubenswrapper[4813]: I0320 15:40:08.265697 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:08 crc kubenswrapper[4813]: I0320 15:40:08.265755 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:08 crc kubenswrapper[4813]: E0320 15:40:08.267157 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:08 crc kubenswrapper[4813]: I0320 15:40:08.265674 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:08 crc kubenswrapper[4813]: E0320 15:40:08.267348 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:08 crc kubenswrapper[4813]: E0320 15:40:08.267370 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.265767 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.265817 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.265767 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.266035 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.266020 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.266142 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.266278 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.266361 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.467083 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.467147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.467164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.467188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.467205 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:10Z","lastTransitionTime":"2026-03-20T15:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.487527 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:10Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.492776 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.492851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.492877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.492911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.492936 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:10Z","lastTransitionTime":"2026-03-20T15:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.516593 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:10Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.522363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.522416 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.522439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.522469 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.522526 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:10Z","lastTransitionTime":"2026-03-20T15:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.544137 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:10Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.550040 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.550094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.550112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.550134 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.550151 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:10Z","lastTransitionTime":"2026-03-20T15:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.573402 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:10Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.578454 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.578554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.578576 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.578605 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:10 crc kubenswrapper[4813]: I0320 15:40:10.578626 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:10Z","lastTransitionTime":"2026-03-20T15:40:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.597508 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:10Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:10 crc kubenswrapper[4813]: E0320 15:40:10.597671 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.286791 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.309695 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.327171 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.362698 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: E0320 15:40:11.378129 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.392922 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037
e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.414631 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.431870 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.453840 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.475014 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.494990 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.520163 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.543378 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\
\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.561336 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.577422 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.599084 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.615680 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.646927 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:11 crc kubenswrapper[4813]: I0320 15:40:11.668730 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:11Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:12 crc kubenswrapper[4813]: I0320 15:40:12.265954 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:12 crc kubenswrapper[4813]: I0320 15:40:12.266065 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:12 crc kubenswrapper[4813]: E0320 15:40:12.266218 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:12 crc kubenswrapper[4813]: I0320 15:40:12.266266 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:12 crc kubenswrapper[4813]: I0320 15:40:12.266290 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:12 crc kubenswrapper[4813]: E0320 15:40:12.266475 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:12 crc kubenswrapper[4813]: E0320 15:40:12.266758 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:12 crc kubenswrapper[4813]: E0320 15:40:12.266902 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.265322 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.265553 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.265589 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.265695 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.265713 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.265828 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.265928 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.266022 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.364237 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.364608 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:18.364533533 +0000 UTC m=+207.787236414 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.465196 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.465272 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.465308 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:14 crc kubenswrapper[4813]: I0320 15:40:14.465344 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465559 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465602 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465641 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465638 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465700 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465722 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465662 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465703 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:41:18.465667039 +0000 UTC m=+207.888369910 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465584 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465913 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:41:18.465840203 +0000 UTC m=+207.888543054 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465962 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:41:18.465949286 +0000 UTC m=+207.888652297 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:40:14 crc kubenswrapper[4813]: E0320 15:40:14.465984 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:41:18.465976207 +0000 UTC m=+207.888679058 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:40:15 crc kubenswrapper[4813]: I0320 15:40:15.266557 4813 scope.go:117] "RemoveContainer" containerID="f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d" Mar 20 15:40:15 crc kubenswrapper[4813]: E0320 15:40:15.266955 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:40:16 crc kubenswrapper[4813]: I0320 15:40:16.265046 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:16 crc kubenswrapper[4813]: I0320 15:40:16.265113 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:16 crc kubenswrapper[4813]: E0320 15:40:16.265216 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:16 crc kubenswrapper[4813]: I0320 15:40:16.265268 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:16 crc kubenswrapper[4813]: I0320 15:40:16.265279 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:16 crc kubenswrapper[4813]: E0320 15:40:16.265412 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:16 crc kubenswrapper[4813]: E0320 15:40:16.265652 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:16 crc kubenswrapper[4813]: E0320 15:40:16.265998 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:16 crc kubenswrapper[4813]: E0320 15:40:16.379854 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:40:18 crc kubenswrapper[4813]: I0320 15:40:18.265602 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:18 crc kubenswrapper[4813]: I0320 15:40:18.265669 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:18 crc kubenswrapper[4813]: I0320 15:40:18.265689 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:18 crc kubenswrapper[4813]: I0320 15:40:18.265624 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:18 crc kubenswrapper[4813]: E0320 15:40:18.265798 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:18 crc kubenswrapper[4813]: E0320 15:40:18.265899 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:18 crc kubenswrapper[4813]: E0320 15:40:18.266045 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:18 crc kubenswrapper[4813]: E0320 15:40:18.266167 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:18 crc kubenswrapper[4813]: I0320 15:40:18.717314 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:18 crc kubenswrapper[4813]: E0320 15:40:18.717602 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:40:18 crc kubenswrapper[4813]: E0320 15:40:18.717737 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:40:50.717707821 +0000 UTC m=+180.140410692 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.265409 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.265457 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.265457 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.265658 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.265772 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.265860 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.265996 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.266106 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.687891 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.687927 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.687937 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.687952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.687965 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:20Z","lastTransitionTime":"2026-03-20T15:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.707589 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:20Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.711776 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.711834 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.711850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.711869 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.711884 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:20Z","lastTransitionTime":"2026-03-20T15:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.732309 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:20Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.737540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.737595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.737609 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.737628 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.737643 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:20Z","lastTransitionTime":"2026-03-20T15:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.758403 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:20Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.763262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.763300 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.763311 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.763328 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.763339 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:20Z","lastTransitionTime":"2026-03-20T15:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.777718 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:20Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.782401 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.782474 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.782539 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.782563 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:20 crc kubenswrapper[4813]: I0320 15:40:20.782581 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:20Z","lastTransitionTime":"2026-03-20T15:40:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.801192 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:20Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:20 crc kubenswrapper[4813]: E0320 15:40:20.801338 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.288954 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.306990 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.331454 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.354705 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.366887 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: E0320 15:40:21.381188 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.384689 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.401803 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.418756 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.437237 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.453312 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.465801 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.488778 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.506231 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.521544 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.547323 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.569685 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cn
i/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.584485 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:21 crc kubenswrapper[4813]: I0320 15:40:21.605015 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:21Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:22 crc kubenswrapper[4813]: I0320 15:40:22.265426 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:22 crc kubenswrapper[4813]: E0320 15:40:22.266061 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:22 crc kubenswrapper[4813]: I0320 15:40:22.266442 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:22 crc kubenswrapper[4813]: E0320 15:40:22.266614 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:22 crc kubenswrapper[4813]: I0320 15:40:22.266883 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:22 crc kubenswrapper[4813]: E0320 15:40:22.267008 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:22 crc kubenswrapper[4813]: I0320 15:40:22.267290 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:22 crc kubenswrapper[4813]: E0320 15:40:22.267422 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.111581 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/0.log" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.111654 4813 generic.go:334] "Generic (PLEG): container finished" podID="a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1" containerID="2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147" exitCode=1 Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.111701 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerDied","Data":"2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147"} Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.112221 4813 scope.go:117] "RemoveContainer" containerID="2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.128885 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.144872 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.161946 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.182883 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.204692 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.221275 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.238390 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.262971 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.275568 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.297727 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.313056 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.326363 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.343022 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 
2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.357761 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.378800 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.391064 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.399122 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:23 crc kubenswrapper[4813]: I0320 15:40:23.407959 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:23Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.117827 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/0.log" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.117909 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerStarted","Data":"8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d"} Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.140326 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.162636 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.180338 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.200589 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.220097 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.239275 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.255844 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.265307 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.265411 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:24 crc kubenswrapper[4813]: E0320 15:40:24.265477 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.265669 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.265767 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:24 crc kubenswrapper[4813]: E0320 15:40:24.265665 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:24 crc kubenswrapper[4813]: E0320 15:40:24.265953 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:24 crc kubenswrapper[4813]: E0320 15:40:24.266320 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.273733 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.290155 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.312972 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.322586 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.332181 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.343069 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 
2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.352983 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.371669 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.386314 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.397912 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:24 crc kubenswrapper[4813]: I0320 15:40:24.410061 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:24Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:26 crc kubenswrapper[4813]: I0320 15:40:26.265447 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:26 crc kubenswrapper[4813]: I0320 15:40:26.265533 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:26 crc kubenswrapper[4813]: I0320 15:40:26.265447 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:26 crc kubenswrapper[4813]: E0320 15:40:26.265655 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:26 crc kubenswrapper[4813]: I0320 15:40:26.265701 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:26 crc kubenswrapper[4813]: E0320 15:40:26.265858 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:26 crc kubenswrapper[4813]: E0320 15:40:26.265983 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:26 crc kubenswrapper[4813]: E0320 15:40:26.266111 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:26 crc kubenswrapper[4813]: E0320 15:40:26.382386 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:40:28 crc kubenswrapper[4813]: I0320 15:40:28.265664 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:28 crc kubenswrapper[4813]: I0320 15:40:28.265738 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:28 crc kubenswrapper[4813]: I0320 15:40:28.265738 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:28 crc kubenswrapper[4813]: E0320 15:40:28.265840 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:28 crc kubenswrapper[4813]: I0320 15:40:28.265688 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:28 crc kubenswrapper[4813]: E0320 15:40:28.265985 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:28 crc kubenswrapper[4813]: E0320 15:40:28.266262 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:28 crc kubenswrapper[4813]: E0320 15:40:28.266223 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:29 crc kubenswrapper[4813]: I0320 15:40:29.266888 4813 scope.go:117] "RemoveContainer" containerID="f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.152106 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/2.log" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.156261 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.157024 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.191680 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.209900 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd98831
0f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.224108 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.235155 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.250773 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.262571 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.265180 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.265217 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.265180 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:30 crc kubenswrapper[4813]: E0320 15:40:30.265342 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.265364 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:30 crc kubenswrapper[4813]: E0320 15:40:30.265436 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:30 crc kubenswrapper[4813]: E0320 15:40:30.265550 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:30 crc kubenswrapper[4813]: E0320 15:40:30.265639 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.274806 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.285109 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.297938 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.310318 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.320234 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.329794 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e0061
7b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.340080 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.348928 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.373383 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.386177 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.402949 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 
2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.416558 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:30Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.991738 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.991809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.991826 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.991860 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:30 crc kubenswrapper[4813]: I0320 15:40:30.991881 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:30Z","lastTransitionTime":"2026-03-20T15:40:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.012439 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.015961 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.016021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.016039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.016064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.016086 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:31Z","lastTransitionTime":"2026-03-20T15:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.034771 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.038750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.038787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.038797 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.038812 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.038823 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:31Z","lastTransitionTime":"2026-03-20T15:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.057309 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.061347 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.061409 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.061435 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.061457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.061509 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:31Z","lastTransitionTime":"2026-03-20T15:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.080320 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.085365 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.085420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.085437 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.085460 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.085478 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:31Z","lastTransitionTime":"2026-03-20T15:40:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.105002 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.105751 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.163251 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/3.log" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.164603 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/2.log" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.168607 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" exitCode=1 Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.168932 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.169241 4813 scope.go:117] "RemoveContainer" containerID="f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.169817 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.169995 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.187058 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.207359 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.223079 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.257374 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"ork=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler/scheduler_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler/scheduler\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.169\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 15:40:30.184369 7380 services_controller.go:452] Built service openshift-kube-scheduler/scheduler per-node LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184379 
7380 services_controller.go:453] Built service openshift-kube-scheduler/scheduler template LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184387 7380 services_controller.go:454] Service openshift-kube-scheduler/scheduler for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0320 15:40:30.184391 7380 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env
\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.276305 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 
15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.294575 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.315822 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.333215 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.367154 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: E0320 15:40:31.382803 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.392655 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037
e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.409169 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.422453 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.439153 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.450050 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.463216 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.479189 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.489884 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.500893 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.522529 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.537520 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.549592 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.563528 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.575722 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.587424 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.596555 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.607698 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.624215 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.645131 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f044fb018cc4087a37fe53b65fd1b0394bbc5daaebf108d9bf28ac9ffed3d89d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:02Z\\\",\\\"message\\\":\\\"] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 15:40:02.267061 7047 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 15:40:02.267073 7047 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 15:40:02.267078 7047 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 15:40:02.267096 7047 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 15:40:02.267110 7047 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 15:40:02.267116 7047 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0320 15:40:02.267117 7047 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 15:40:02.267132 7047 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0320 15:40:02.267143 7047 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0320 15:40:02.268791 7047 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 15:40:02.268838 7047 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0320 15:40:02.268866 7047 factory.go:656] Stopping watch factory\\\\nI0320 15:40:02.268886 7047 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0320 15:40:02.268899 7047 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"ork=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler/scheduler_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler/scheduler\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.169\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 15:40:30.184369 7380 services_controller.go:452] Built service openshift-kube-scheduler/scheduler per-node LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184379 
7380 services_controller.go:453] Built service openshift-kube-scheduler/scheduler template LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184387 7380 services_controller.go:454] Service openshift-kube-scheduler/scheduler for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0320 15:40:30.184391 7380 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env
\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.661207 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 
15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.676743 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.694171 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.709198 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.735574 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.753174 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.765703 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:31 crc kubenswrapper[4813]: I0320 15:40:31.778705 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:31Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.174064 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/3.log" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.178179 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:40:32 crc kubenswrapper[4813]: E0320 15:40:32.178467 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.205962 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"ork=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler/scheduler_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler/scheduler\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.169\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 15:40:30.184369 7380 services_controller.go:452] Built service openshift-kube-scheduler/scheduler per-node LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184379 7380 services_controller.go:453] Built service openshift-kube-scheduler/scheduler template LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184387 7380 services_controller.go:454] Service openshift-kube-scheduler/scheduler for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0320 15:40:30.184391 7380 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.223079 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.239671 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.258736 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.264884 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.264964 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.264895 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:32 crc kubenswrapper[4813]: E0320 15:40:32.265011 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.264954 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:32 crc kubenswrapper[4813]: E0320 15:40:32.265124 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:32 crc kubenswrapper[4813]: E0320 15:40:32.265231 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:32 crc kubenswrapper[4813]: E0320 15:40:32.265361 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.273586 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"
hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.285750 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.300602 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.314314 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.328617 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.357670 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a9094648
78ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.381218 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd98831
0f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: 
Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.397285 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.413843 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.429117 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.443724 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.460947 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.474920 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:32 crc kubenswrapper[4813]: I0320 15:40:32.492312 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:32Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:33 crc kubenswrapper[4813]: I0320 15:40:33.285767 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Mar 20 15:40:34 crc kubenswrapper[4813]: I0320 15:40:34.265677 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:34 crc kubenswrapper[4813]: I0320 15:40:34.265677 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:34 crc kubenswrapper[4813]: E0320 15:40:34.265896 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:34 crc kubenswrapper[4813]: I0320 15:40:34.265713 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:34 crc kubenswrapper[4813]: I0320 15:40:34.265674 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:34 crc kubenswrapper[4813]: E0320 15:40:34.266074 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:34 crc kubenswrapper[4813]: E0320 15:40:34.266194 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:34 crc kubenswrapper[4813]: E0320 15:40:34.266379 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:36 crc kubenswrapper[4813]: I0320 15:40:36.265680 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:36 crc kubenswrapper[4813]: I0320 15:40:36.265745 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:36 crc kubenswrapper[4813]: I0320 15:40:36.265816 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:36 crc kubenswrapper[4813]: E0320 15:40:36.265979 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:36 crc kubenswrapper[4813]: I0320 15:40:36.266238 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:36 crc kubenswrapper[4813]: E0320 15:40:36.266340 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:36 crc kubenswrapper[4813]: E0320 15:40:36.266648 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:36 crc kubenswrapper[4813]: E0320 15:40:36.266833 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:36 crc kubenswrapper[4813]: E0320 15:40:36.383880 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:40:38 crc kubenswrapper[4813]: I0320 15:40:38.264953 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:38 crc kubenswrapper[4813]: E0320 15:40:38.265315 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:38 crc kubenswrapper[4813]: I0320 15:40:38.264999 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:38 crc kubenswrapper[4813]: I0320 15:40:38.265051 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:38 crc kubenswrapper[4813]: I0320 15:40:38.264999 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:38 crc kubenswrapper[4813]: E0320 15:40:38.265577 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:38 crc kubenswrapper[4813]: E0320 15:40:38.265391 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:38 crc kubenswrapper[4813]: E0320 15:40:38.265621 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:40 crc kubenswrapper[4813]: I0320 15:40:40.265709 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:40 crc kubenswrapper[4813]: I0320 15:40:40.265715 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:40 crc kubenswrapper[4813]: I0320 15:40:40.265729 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:40 crc kubenswrapper[4813]: E0320 15:40:40.265924 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:40 crc kubenswrapper[4813]: E0320 15:40:40.266075 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:40 crc kubenswrapper[4813]: I0320 15:40:40.266124 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:40 crc kubenswrapper[4813]: E0320 15:40:40.266282 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:40 crc kubenswrapper[4813]: E0320 15:40:40.266431 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.177009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.177090 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.177113 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.177146 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.177168 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:41Z","lastTransitionTime":"2026-03-20T15:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.197928 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.202807 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.202876 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.202899 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.202930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.202948 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:41Z","lastTransitionTime":"2026-03-20T15:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.216634 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.221025 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.221091 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.221114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.221144 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.221165 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:41Z","lastTransitionTime":"2026-03-20T15:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.242068 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.246632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.246688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.246708 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.246730 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.246746 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:41Z","lastTransitionTime":"2026-03-20T15:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.266360 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.270946 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.271013 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.271037 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.271068 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.271092 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:41Z","lastTransitionTime":"2026-03-20T15:40:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.284415 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d13e2ba-70c9-44e7-a6c2-1bc490f51a9b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3e5b35ee11de6cce8628de624ab4de23fa26b6790455ae162907c66b955744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d60baf24e00617b3e96d9af07701112a33b8f35a19253cf9903c737bfac03943\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20
T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.300061 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5a09fee1-2bf6-40b4-b86c-857730ce0ad5\\\",\\\"systemUUID\\\":\\\"a34aaf21-bc48-4d4d-b0c3-48ef72d66bb9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.300295 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.309074 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.326778 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-czvmm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5dd2ff6-f564-457c-9781-eefecdfea677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4439b82b8c7ff865a8c791eb0d91e392d3931c1728b921289dd7d5fbeb7fbc20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4qbv7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-czvmm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.359777 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"32fae70f-6b1f-4935-9747-8080c9feb514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:30Z\\\",\\\"message\\\":\\\"ork=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-scheduler/scheduler_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler/scheduler\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.169\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 15:40:30.184369 7380 services_controller.go:452] Built service openshift-kube-scheduler/scheduler per-node LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184379 7380 services_controller.go:453] Built service openshift-kube-scheduler/scheduler template LB for network=default: []services.LB{}\\\\nI0320 15:40:30.184387 7380 services_controller.go:454] Service openshift-kube-scheduler/scheduler for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0320 15:40:30.184391 7380 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:40:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tms64\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-dvvsh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.376436 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9c5cc732-9b60-47cf-93f1-346fa99b4ac4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95b05e5ce5277a33e4d933ce25805367258dc66e20fcfb443fca6f2e0cf35c42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://332ba533b4eea705a57b471d7634704cbc8ea1098d6e0138e3328864ab71e0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tf2jn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jmrs2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: E0320 15:40:41.384534 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.407347 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d2aef7-338a-443b-b19e-27da74e91870\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba1359f3d02ec309b720d41a578edee948d6fe134c2f110ccf3e20a0cb745a8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2c6b6be5a3c802947f83131f64a56b0438a1a15b908524c54cee55759b168ce\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:51Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 15:38:22.398799 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 15:38:22.400199 1 observer_polling.go:159] Starting file observer\\\\nI0320 15:38:22.407221 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 15:38:22.408276 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 15:38:49.719729 1 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials\\\\nI0320 15:38:51.930989 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 15:38:51.931074 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:22Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://86ccefb1bcc2d04371d91a518875328c145139e697219054fb3e9afb0f30716c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5c590d909352798fc16de9dea6223ff5fa5299909451ff7a7c59e2a208a6dc4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://70aa29b0ae23f70f78b4c8c6923ab47903541d022d5f008051f2faa51b484018\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-con
troller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.425715 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a70ed5bb-7ce0-43af-8620-a7499e47e656\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273729cff607ab86d5b355d96024536f314bcc351b28d104467e08ca86f83a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15afcda259bf2c5521426ed928da31b3874fd6a43df1adac634f77648176cd49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}
,\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad853de2df110ff5882d61990b3b3247631679e3452512b22664c553fb85a696\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f906bdd7cc07c9e5d4190bdaab322c25e2ad2d0ce35e51fc704e1bbd7e3a6d7a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.445912 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa973727701c06f689075ea68404e06e520b4f17c85296e018263d3bfc1df758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.462364 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc04883-b38a-4b6a-bee4-f6804c8aad94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2757fa9ae2115f2159512deb22eba2313b2c31150b9be6be8361ba4ec277c574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8979w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-l8d6t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.494742 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f9061dd-7e46-49e8-a879-e224a7ab2d36\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:38:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbf6b7a5d60a9e0777d6507bb9a6f6b94795f125327490a30ac68fd64e77af49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2c00754f5c57e1b717e6e6ad175a71596e2a695bb194bd6afa07b1ff2703173\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d533128bb474dbe4f16f7d3fab68a2913dccc37d7492370d85dd369a98996fa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a80531f48679263d18edf16664298a909464878ab0847bb31e653c38874306\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://696f939db5f4ccd1ab2842f40ff8478d9a59c3b3332b4eb3abb1c3aaaa3f630d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5012e6e3ff5a2e4f32bde959dde9677167fa10c95adf4950c261b8eeccff2b45\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e080cb9b75c8176f3cf1889ec9854f600be4e56f56cbdb52290a3d1faa0d701b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4867c0c1e7837b4eb034923a23a7960a2959d45083ada1b51c9744982274f20\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.518988 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4645d0fa-057e-498d-8b10-6897ac843624\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:37:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T15:38:49Z\\\",\\\"message\\\":\\\"W0320 15:38:48.549856 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0320 15:38:48.550224 1 crypto.go:601] Generating new CA for check-endpoints-signer@1774021128 cert, and key in /tmp/serving-cert-3737586956/serving-signer.crt, /tmp/serving-cert-3737586956/serving-signer.key\\\\nI0320 15:38:48.880267 1 observer_polling.go:159] Starting file observer\\\\nW0320 15:38:48.890357 1 builder.go:272] unable to get owner reference (falling back to namespace): Unauthorized\\\\nI0320 15:38:48.890655 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 15:38:48.891964 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3737586956/tls.crt::/tmp/serving-cert-3737586956/tls.key\\\\\\\"\\\\nF0320 15:38:49.206846 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:38:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:37:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:37:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:37:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:37:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.533604 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://369464562ae9ed7d9f1d559dad31a74264015245d49c4af4d36c76b615a74baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.550854 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vx6nr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f0d437-97b9-4f08-8136-4a13fe545b08\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de6c767f20a6e74205420194a4671853d8db5ca7daf50e48b0c4dae38f13431c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7vg7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vx6nr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.568530 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.588327 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.609116 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.635630 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f0207723-5d01-4034-bdcd-5bb28ff71021\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0a1f7adb5bfa0ecfb9494f83067bffcd6b2e61439a61b6da5630a1f60206824\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"container
ID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790cb7b11c2c11cb423a2473d35e50ee8711182bed0bc016199ba2fdd3d13610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88852343bc8d25f0c09b4902717eba0ac86db6b8518502a9ae6ae66a39a3b75a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2dfe6bba23862815aba074a0eeec8dbcc4b5efcc0b7b0c3a4f58b799b8e5c1f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:37Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c92dbda53de6c6468ab6e30e4048ae1a8222bcb147186f9e24437fdb3667f77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0581707426c4bdbaf5db426546606cf73d2194ee7d533c71ccd39e14a1e79688\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a0f1a07694529b13151ca34985f46212f27f8459b0f2b4d4969502b66e09a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T15:39:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t9mcf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rsfpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.656827 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gpmgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:40:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T15:40:22Z\\\",\\\"message\\\":\\\"2026-03-20T15:39:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6\\\\n2026-03-20T15:39:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0fb7731b-e63b-4e93-b8f5-ff0709ffe4f6 to /host/opt/cni/bin/\\\\n2026-03-20T15:39:37Z [verbose] multus-daemon started\\\\n2026-03-20T15:39:37Z [verbose] Readiness Indicator file check\\\\n2026-03-20T15:40:22Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T15:39:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:40:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6rhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gpmgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:41 crc kubenswrapper[4813]: I0320 15:40:41.671557 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:41Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:42 crc kubenswrapper[4813]: I0320 15:40:42.265817 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:42 crc kubenswrapper[4813]: I0320 15:40:42.265942 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:42 crc kubenswrapper[4813]: I0320 15:40:42.265844 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:42 crc kubenswrapper[4813]: I0320 15:40:42.266018 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:42 crc kubenswrapper[4813]: E0320 15:40:42.266151 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:42 crc kubenswrapper[4813]: E0320 15:40:42.266329 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:42 crc kubenswrapper[4813]: E0320 15:40:42.266657 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:42 crc kubenswrapper[4813]: E0320 15:40:42.266878 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:44 crc kubenswrapper[4813]: I0320 15:40:44.265280 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:44 crc kubenswrapper[4813]: I0320 15:40:44.265806 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:44 crc kubenswrapper[4813]: I0320 15:40:44.265949 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:44 crc kubenswrapper[4813]: E0320 15:40:44.265976 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:44 crc kubenswrapper[4813]: I0320 15:40:44.266016 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:44 crc kubenswrapper[4813]: E0320 15:40:44.266178 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:44 crc kubenswrapper[4813]: E0320 15:40:44.266335 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:44 crc kubenswrapper[4813]: E0320 15:40:44.266557 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:44 crc kubenswrapper[4813]: I0320 15:40:44.266870 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:40:44 crc kubenswrapper[4813]: E0320 15:40:44.267099 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:40:46 crc kubenswrapper[4813]: I0320 15:40:46.265752 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:46 crc kubenswrapper[4813]: I0320 15:40:46.265802 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:46 crc kubenswrapper[4813]: I0320 15:40:46.265803 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:46 crc kubenswrapper[4813]: E0320 15:40:46.265877 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:46 crc kubenswrapper[4813]: I0320 15:40:46.266062 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:46 crc kubenswrapper[4813]: E0320 15:40:46.266123 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:46 crc kubenswrapper[4813]: E0320 15:40:46.266256 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:46 crc kubenswrapper[4813]: E0320 15:40:46.266382 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:46 crc kubenswrapper[4813]: E0320 15:40:46.386753 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:40:48 crc kubenswrapper[4813]: I0320 15:40:48.265507 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:48 crc kubenswrapper[4813]: I0320 15:40:48.265565 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:48 crc kubenswrapper[4813]: I0320 15:40:48.265610 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:48 crc kubenswrapper[4813]: E0320 15:40:48.265649 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:48 crc kubenswrapper[4813]: I0320 15:40:48.265673 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:48 crc kubenswrapper[4813]: E0320 15:40:48.265747 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:48 crc kubenswrapper[4813]: E0320 15:40:48.265840 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:48 crc kubenswrapper[4813]: E0320 15:40:48.265966 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:50 crc kubenswrapper[4813]: I0320 15:40:50.264959 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:50 crc kubenswrapper[4813]: E0320 15:40:50.265078 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:50 crc kubenswrapper[4813]: I0320 15:40:50.265133 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:50 crc kubenswrapper[4813]: I0320 15:40:50.265232 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:50 crc kubenswrapper[4813]: E0320 15:40:50.265360 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:50 crc kubenswrapper[4813]: I0320 15:40:50.265434 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:50 crc kubenswrapper[4813]: E0320 15:40:50.265821 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:50 crc kubenswrapper[4813]: E0320 15:40:50.266079 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:50 crc kubenswrapper[4813]: I0320 15:40:50.806595 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:50 crc kubenswrapper[4813]: E0320 15:40:50.806813 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:40:50 crc kubenswrapper[4813]: E0320 15:40:50.806938 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs podName:5e5f925a-75e4-485f-9d5e-2be4c2c13616 nodeName:}" failed. No retries permitted until 2026-03-20 15:41:54.806910737 +0000 UTC m=+244.229613618 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs") pod "network-metrics-daemon-lc5px" (UID: "5e5f925a-75e4-485f-9d5e-2be4c2c13616") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.286545 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lc5px" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5f925a-75e4-485f-9d5e-2be4c2c13616\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xjq9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T15:39:46Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lc5px\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:51Z is after 
2025-08-24T17:21:41Z" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.307556 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T15:39:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62527182db50ed92146639eeaa13e614d36a0143117763ce3d0cd5d0c92b5c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49f791f54d5d3eaed243eb0a1973ebdc65f69bbd15572e86d1e653ea643dc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T15:39:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T15:40:51Z is after 2025-08-24T17:21:41Z" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.365708 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-rsfpf" podStartSLOduration=115.365687044 
podStartE2EDuration="1m55.365687044s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.36512917 +0000 UTC m=+180.787832021" watchObservedRunningTime="2026-03-20 15:40:51.365687044 +0000 UTC m=+180.788389885" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.386313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.386366 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.386383 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.386406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.386423 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T15:40:51Z","lastTransitionTime":"2026-03-20T15:40:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 15:40:51 crc kubenswrapper[4813]: E0320 15:40:51.387227 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.403042 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gpmgw" podStartSLOduration=115.403017821 podStartE2EDuration="1m55.403017821s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.385982593 +0000 UTC m=+180.808685434" watchObservedRunningTime="2026-03-20 15:40:51.403017821 +0000 UTC m=+180.825720672" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.403331 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=101.403323169 podStartE2EDuration="1m41.403323169s" podCreationTimestamp="2026-03-20 15:39:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.402636382 +0000 UTC m=+180.825339233" watchObservedRunningTime="2026-03-20 15:40:51.403323169 +0000 UTC m=+180.826026030" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.437154 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-czvmm" podStartSLOduration=116.43712714 podStartE2EDuration="1m56.43712714s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.436919835 +0000 UTC m=+180.859622686" watchObservedRunningTime="2026-03-20 15:40:51.43712714 +0000 UTC m=+180.859830021" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.449375 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt"] Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.449782 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.451645 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.452196 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.453157 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.453275 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.490587 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jmrs2" podStartSLOduration=115.490570084 podStartE2EDuration="1m55.490570084s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.489695512 +0000 UTC m=+180.912398383" watchObservedRunningTime="2026-03-20 15:40:51.490570084 +0000 UTC m=+180.913272945" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.507213 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=18.507196823 podStartE2EDuration="18.507196823s" podCreationTimestamp="2026-03-20 15:40:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.506759652 +0000 UTC m=+180.929462533" watchObservedRunningTime="2026-03-20 15:40:51.507196823 +0000 UTC m=+180.929899664" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.513495 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fde7973f-1003-4839-a94c-91f713c5719b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.513526 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fde7973f-1003-4839-a94c-91f713c5719b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.513544 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fde7973f-1003-4839-a94c-91f713c5719b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.513599 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fde7973f-1003-4839-a94c-91f713c5719b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.513623 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fde7973f-1003-4839-a94c-91f713c5719b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.518525 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.51850552 podStartE2EDuration="52.51850552s" podCreationTimestamp="2026-03-20 15:39:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.517786623 +0000 UTC m=+180.940489464" watchObservedRunningTime="2026-03-20 15:40:51.51850552 +0000 UTC m=+180.941208361" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.583790 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podStartSLOduration=115.583760355 podStartE2EDuration="1m55.583760355s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.581800947 +0000 UTC m=+181.004503798" watchObservedRunningTime="2026-03-20 15:40:51.583760355 +0000 UTC m=+181.006463196" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615182 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fde7973f-1003-4839-a94c-91f713c5719b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615436 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fde7973f-1003-4839-a94c-91f713c5719b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615555 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fde7973f-1003-4839-a94c-91f713c5719b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615642 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fde7973f-1003-4839-a94c-91f713c5719b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: 
\"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615743 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fde7973f-1003-4839-a94c-91f713c5719b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615849 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/fde7973f-1003-4839-a94c-91f713c5719b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.615311 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/fde7973f-1003-4839-a94c-91f713c5719b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.616431 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fde7973f-1003-4839-a94c-91f713c5719b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.621137 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fde7973f-1003-4839-a94c-91f713c5719b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.635999 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=92.635980489 podStartE2EDuration="1m32.635980489s" podCreationTimestamp="2026-03-20 15:39:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.635963938 +0000 UTC m=+181.058666779" watchObservedRunningTime="2026-03-20 15:40:51.635980489 +0000 UTC m=+181.058683330" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.636289 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=88.636284396 podStartE2EDuration="1m28.636284396s" podCreationTimestamp="2026-03-20 15:39:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.617286579 +0000 UTC m=+181.039989420" watchObservedRunningTime="2026-03-20 15:40:51.636284396 +0000 UTC m=+181.058987237" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.638937 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/fde7973f-1003-4839-a94c-91f713c5719b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9gtdt\" (UID: \"fde7973f-1003-4839-a94c-91f713c5719b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.658023 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-vx6nr" podStartSLOduration=116.65800465 podStartE2EDuration="1m56.65800465s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:51.656998725 +0000 UTC m=+181.079701566" watchObservedRunningTime="2026-03-20 15:40:51.65800465 +0000 UTC m=+181.080707491" Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.680648 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.689856 4813 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Mar 20 15:40:51 crc kubenswrapper[4813]: I0320 15:40:51.761996 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" Mar 20 15:40:51 crc kubenswrapper[4813]: W0320 15:40:51.787801 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfde7973f_1003_4839_a94c_91f713c5719b.slice/crio-7a633fb4d3e7a326115f6b19534054e814d6104eb6acd43f9c5625c5669c4ebf WatchSource:0}: Error finding container 7a633fb4d3e7a326115f6b19534054e814d6104eb6acd43f9c5625c5669c4ebf: Status 404 returned error can't find the container with id 7a633fb4d3e7a326115f6b19534054e814d6104eb6acd43f9c5625c5669c4ebf Mar 20 15:40:52 crc kubenswrapper[4813]: I0320 15:40:52.252238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" event={"ID":"fde7973f-1003-4839-a94c-91f713c5719b","Type":"ContainerStarted","Data":"294e2cf14c434288780ed08ff1e691a43663619096cb2c49c5b436c8fe708afe"} Mar 20 15:40:52 crc kubenswrapper[4813]: I0320 15:40:52.252351 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" event={"ID":"fde7973f-1003-4839-a94c-91f713c5719b","Type":"ContainerStarted","Data":"7a633fb4d3e7a326115f6b19534054e814d6104eb6acd43f9c5625c5669c4ebf"} Mar 20 15:40:52 crc kubenswrapper[4813]: I0320 15:40:52.265072 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:52 crc kubenswrapper[4813]: I0320 15:40:52.265193 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:52 crc kubenswrapper[4813]: I0320 15:40:52.265092 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:52 crc kubenswrapper[4813]: E0320 15:40:52.265264 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:52 crc kubenswrapper[4813]: E0320 15:40:52.265448 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:52 crc kubenswrapper[4813]: I0320 15:40:52.265509 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:52 crc kubenswrapper[4813]: E0320 15:40:52.265631 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:52 crc kubenswrapper[4813]: E0320 15:40:52.265854 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:54 crc kubenswrapper[4813]: I0320 15:40:54.265208 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:54 crc kubenswrapper[4813]: E0320 15:40:54.265390 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:54 crc kubenswrapper[4813]: I0320 15:40:54.265583 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:54 crc kubenswrapper[4813]: E0320 15:40:54.265783 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:54 crc kubenswrapper[4813]: I0320 15:40:54.265387 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:54 crc kubenswrapper[4813]: E0320 15:40:54.266179 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:54 crc kubenswrapper[4813]: I0320 15:40:54.266561 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:54 crc kubenswrapper[4813]: E0320 15:40:54.266707 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:56 crc kubenswrapper[4813]: I0320 15:40:56.264991 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:56 crc kubenswrapper[4813]: I0320 15:40:56.265053 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:56 crc kubenswrapper[4813]: I0320 15:40:56.265064 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:56 crc kubenswrapper[4813]: E0320 15:40:56.265124 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:56 crc kubenswrapper[4813]: I0320 15:40:56.265239 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:56 crc kubenswrapper[4813]: E0320 15:40:56.265308 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:56 crc kubenswrapper[4813]: E0320 15:40:56.265806 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:56 crc kubenswrapper[4813]: E0320 15:40:56.265909 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:56 crc kubenswrapper[4813]: E0320 15:40:56.388407 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:40:58 crc kubenswrapper[4813]: I0320 15:40:58.265368 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:40:58 crc kubenswrapper[4813]: I0320 15:40:58.265368 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:40:58 crc kubenswrapper[4813]: E0320 15:40:58.266139 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:40:58 crc kubenswrapper[4813]: I0320 15:40:58.265401 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:40:58 crc kubenswrapper[4813]: I0320 15:40:58.265380 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:40:58 crc kubenswrapper[4813]: E0320 15:40:58.266364 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:40:58 crc kubenswrapper[4813]: E0320 15:40:58.266401 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:40:58 crc kubenswrapper[4813]: E0320 15:40:58.266717 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:40:59 crc kubenswrapper[4813]: I0320 15:40:59.267185 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:40:59 crc kubenswrapper[4813]: E0320 15:40:59.267472 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-dvvsh_openshift-ovn-kubernetes(32fae70f-6b1f-4935-9747-8080c9feb514)\"" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" Mar 20 15:41:00 crc kubenswrapper[4813]: I0320 15:41:00.265381 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:00 crc kubenswrapper[4813]: I0320 15:41:00.265381 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:00 crc kubenswrapper[4813]: I0320 15:41:00.265549 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:00 crc kubenswrapper[4813]: I0320 15:41:00.265624 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:00 crc kubenswrapper[4813]: E0320 15:41:00.266597 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:00 crc kubenswrapper[4813]: E0320 15:41:00.266462 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:00 crc kubenswrapper[4813]: E0320 15:41:00.266213 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:00 crc kubenswrapper[4813]: E0320 15:41:00.266649 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:01 crc kubenswrapper[4813]: E0320 15:41:01.389410 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:41:02 crc kubenswrapper[4813]: I0320 15:41:02.265417 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:02 crc kubenswrapper[4813]: I0320 15:41:02.265805 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:02 crc kubenswrapper[4813]: I0320 15:41:02.265898 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:02 crc kubenswrapper[4813]: I0320 15:41:02.265909 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:02 crc kubenswrapper[4813]: E0320 15:41:02.266098 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:02 crc kubenswrapper[4813]: E0320 15:41:02.266213 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:02 crc kubenswrapper[4813]: E0320 15:41:02.266338 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:02 crc kubenswrapper[4813]: E0320 15:41:02.266898 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:04 crc kubenswrapper[4813]: I0320 15:41:04.264781 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:04 crc kubenswrapper[4813]: E0320 15:41:04.264931 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:04 crc kubenswrapper[4813]: I0320 15:41:04.265005 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:04 crc kubenswrapper[4813]: I0320 15:41:04.265034 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:04 crc kubenswrapper[4813]: I0320 15:41:04.265111 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:04 crc kubenswrapper[4813]: E0320 15:41:04.265226 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:04 crc kubenswrapper[4813]: E0320 15:41:04.265306 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:04 crc kubenswrapper[4813]: E0320 15:41:04.265473 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:06 crc kubenswrapper[4813]: I0320 15:41:06.265166 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:06 crc kubenswrapper[4813]: I0320 15:41:06.265217 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:06 crc kubenswrapper[4813]: I0320 15:41:06.265273 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:06 crc kubenswrapper[4813]: E0320 15:41:06.265415 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:06 crc kubenswrapper[4813]: I0320 15:41:06.265471 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:06 crc kubenswrapper[4813]: E0320 15:41:06.265633 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:06 crc kubenswrapper[4813]: E0320 15:41:06.265825 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:06 crc kubenswrapper[4813]: E0320 15:41:06.265890 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:06 crc kubenswrapper[4813]: E0320 15:41:06.391139 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:41:08 crc kubenswrapper[4813]: I0320 15:41:08.265415 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:08 crc kubenswrapper[4813]: I0320 15:41:08.265461 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:08 crc kubenswrapper[4813]: I0320 15:41:08.265473 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:08 crc kubenswrapper[4813]: I0320 15:41:08.265461 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:08 crc kubenswrapper[4813]: E0320 15:41:08.265637 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:08 crc kubenswrapper[4813]: E0320 15:41:08.265745 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:08 crc kubenswrapper[4813]: E0320 15:41:08.265834 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:08 crc kubenswrapper[4813]: E0320 15:41:08.265898 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.327869 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/1.log" Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.328748 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/0.log" Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.328811 4813 generic.go:334] "Generic (PLEG): container finished" podID="a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1" containerID="8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d" exitCode=1 Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.328882 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerDied","Data":"8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d"} Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.328980 4813 scope.go:117] "RemoveContainer" containerID="2c3eed85ab1a6326d69f1bdb748cfc736287c1935fc07620d17da686ba80a147" Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.329533 4813 scope.go:117] "RemoveContainer" containerID="8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d" Mar 20 15:41:09 crc kubenswrapper[4813]: E0320 15:41:09.329734 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gpmgw_openshift-multus(a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1)\"" pod="openshift-multus/multus-gpmgw" podUID="a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1" Mar 20 15:41:09 crc kubenswrapper[4813]: I0320 15:41:09.368205 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9gtdt" podStartSLOduration=134.368176666 
podStartE2EDuration="2m14.368176666s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:40:52.272836125 +0000 UTC m=+181.695539006" watchObservedRunningTime="2026-03-20 15:41:09.368176666 +0000 UTC m=+198.790879547" Mar 20 15:41:10 crc kubenswrapper[4813]: I0320 15:41:10.265618 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:10 crc kubenswrapper[4813]: I0320 15:41:10.265660 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:10 crc kubenswrapper[4813]: I0320 15:41:10.265696 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:10 crc kubenswrapper[4813]: I0320 15:41:10.265624 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:10 crc kubenswrapper[4813]: E0320 15:41:10.265806 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:10 crc kubenswrapper[4813]: E0320 15:41:10.265978 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:10 crc kubenswrapper[4813]: E0320 15:41:10.266112 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:10 crc kubenswrapper[4813]: E0320 15:41:10.266270 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:10 crc kubenswrapper[4813]: I0320 15:41:10.337585 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/1.log" Mar 20 15:41:11 crc kubenswrapper[4813]: E0320 15:41:11.391655 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:41:12 crc kubenswrapper[4813]: I0320 15:41:12.265798 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:12 crc kubenswrapper[4813]: I0320 15:41:12.265843 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:12 crc kubenswrapper[4813]: E0320 15:41:12.265966 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:12 crc kubenswrapper[4813]: I0320 15:41:12.265811 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:12 crc kubenswrapper[4813]: E0320 15:41:12.266192 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:12 crc kubenswrapper[4813]: E0320 15:41:12.266267 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:12 crc kubenswrapper[4813]: I0320 15:41:12.266684 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:12 crc kubenswrapper[4813]: E0320 15:41:12.266818 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:14 crc kubenswrapper[4813]: I0320 15:41:14.265811 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:14 crc kubenswrapper[4813]: I0320 15:41:14.265930 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:41:14 crc kubenswrapper[4813]: I0320 15:41:14.265955 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:14 crc kubenswrapper[4813]: E0320 15:41:14.266006 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:14 crc kubenswrapper[4813]: I0320 15:41:14.266018 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:14 crc kubenswrapper[4813]: I0320 15:41:14.266056 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:14 crc kubenswrapper[4813]: E0320 15:41:14.266165 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:14 crc kubenswrapper[4813]: E0320 15:41:14.266349 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:14 crc kubenswrapper[4813]: E0320 15:41:14.266544 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:15 crc kubenswrapper[4813]: I0320 15:41:15.130745 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-lc5px"] Mar 20 15:41:15 crc kubenswrapper[4813]: I0320 15:41:15.131180 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:15 crc kubenswrapper[4813]: E0320 15:41:15.131317 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:15 crc kubenswrapper[4813]: I0320 15:41:15.360027 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/3.log" Mar 20 15:41:15 crc kubenswrapper[4813]: I0320 15:41:15.364576 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerStarted","Data":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} Mar 20 15:41:15 crc kubenswrapper[4813]: I0320 15:41:15.365175 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:41:15 crc kubenswrapper[4813]: I0320 15:41:15.402301 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podStartSLOduration=139.402281565 podStartE2EDuration="2m19.402281565s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:15.399800514 +0000 UTC m=+204.822503395" watchObservedRunningTime="2026-03-20 15:41:15.402281565 +0000 UTC m=+204.824984416" Mar 20 15:41:16 crc kubenswrapper[4813]: I0320 15:41:16.265822 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:16 crc kubenswrapper[4813]: I0320 15:41:16.265853 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:16 crc kubenswrapper[4813]: I0320 15:41:16.265829 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:16 crc kubenswrapper[4813]: E0320 15:41:16.265951 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:16 crc kubenswrapper[4813]: E0320 15:41:16.266078 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:16 crc kubenswrapper[4813]: E0320 15:41:16.266128 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:16 crc kubenswrapper[4813]: I0320 15:41:16.266687 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:16 crc kubenswrapper[4813]: E0320 15:41:16.266907 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:16 crc kubenswrapper[4813]: E0320 15:41:16.393642 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.265326 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.265355 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.265542 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.265792 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.265885 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.266051 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.266135 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.266346 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.448890 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.449175 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:43:20.449126169 +0000 UTC m=+329.871829040 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.550031 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.550104 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.550137 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:18 crc kubenswrapper[4813]: I0320 15:41:18.550170 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550372 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550396 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550415 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550511 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 15:43:20.550454953 +0000 UTC m=+329.973157824 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550804 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550827 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550844 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550889 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 15:43:20.550874744 +0000 UTC m=+329.973577615 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.550952 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.551089 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:43:20.551051228 +0000 UTC m=+329.973754109 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.551183 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:41:18 crc kubenswrapper[4813]: E0320 15:41:18.551250 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 15:43:20.551230042 +0000 UTC m=+329.973933043 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 15:41:20 crc kubenswrapper[4813]: I0320 15:41:20.265900 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:20 crc kubenswrapper[4813]: I0320 15:41:20.265943 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:20 crc kubenswrapper[4813]: E0320 15:41:20.266081 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:20 crc kubenswrapper[4813]: E0320 15:41:20.266443 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:20 crc kubenswrapper[4813]: I0320 15:41:20.266786 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:20 crc kubenswrapper[4813]: E0320 15:41:20.266948 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:20 crc kubenswrapper[4813]: I0320 15:41:20.266786 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:20 crc kubenswrapper[4813]: E0320 15:41:20.267169 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:21 crc kubenswrapper[4813]: I0320 15:41:21.267471 4813 scope.go:117] "RemoveContainer" containerID="8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d" Mar 20 15:41:21 crc kubenswrapper[4813]: E0320 15:41:21.394534 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:41:22 crc kubenswrapper[4813]: I0320 15:41:22.265784 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:22 crc kubenswrapper[4813]: I0320 15:41:22.265790 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:22 crc kubenswrapper[4813]: E0320 15:41:22.266286 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:22 crc kubenswrapper[4813]: I0320 15:41:22.265854 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:22 crc kubenswrapper[4813]: E0320 15:41:22.266433 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:22 crc kubenswrapper[4813]: I0320 15:41:22.265826 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:22 crc kubenswrapper[4813]: E0320 15:41:22.266744 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:22 crc kubenswrapper[4813]: E0320 15:41:22.266807 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:22 crc kubenswrapper[4813]: I0320 15:41:22.396006 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/1.log" Mar 20 15:41:22 crc kubenswrapper[4813]: I0320 15:41:22.396087 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerStarted","Data":"3ad68338650caa0ff1469d0329f568e52ad46c8dccc1f1ff55ea58f5cf6d50eb"} Mar 20 15:41:24 crc kubenswrapper[4813]: I0320 15:41:24.265379 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:24 crc kubenswrapper[4813]: I0320 15:41:24.265541 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:24 crc kubenswrapper[4813]: I0320 15:41:24.265402 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:24 crc kubenswrapper[4813]: E0320 15:41:24.265637 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:24 crc kubenswrapper[4813]: E0320 15:41:24.265823 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:24 crc kubenswrapper[4813]: I0320 15:41:24.265893 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:24 crc kubenswrapper[4813]: E0320 15:41:24.265956 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:24 crc kubenswrapper[4813]: E0320 15:41:24.266166 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:26 crc kubenswrapper[4813]: I0320 15:41:26.265854 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:26 crc kubenswrapper[4813]: I0320 15:41:26.265860 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:26 crc kubenswrapper[4813]: I0320 15:41:26.265863 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:26 crc kubenswrapper[4813]: I0320 15:41:26.265871 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:26 crc kubenswrapper[4813]: E0320 15:41:26.266162 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lc5px" podUID="5e5f925a-75e4-485f-9d5e-2be4c2c13616" Mar 20 15:41:26 crc kubenswrapper[4813]: E0320 15:41:26.266262 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:41:26 crc kubenswrapper[4813]: E0320 15:41:26.266408 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 15:41:26 crc kubenswrapper[4813]: E0320 15:41:26.266530 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.264999 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.265042 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.265245 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.265028 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.269289 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.270868 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.270935 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.270943 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.270975 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Mar 20 15:41:28 crc kubenswrapper[4813]: I0320 15:41:28.277460 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.788526 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.878431 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.879180 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.880256 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-46qtn"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.880690 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.913805 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pc9l6"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.914568 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-gsjbb"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.914914 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.915821 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.916770 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.916870 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917079 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917238 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917276 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917429 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917437 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917639 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917735 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.917884 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.920234 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.920607 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.921401 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Mar 20 
15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.921840 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.922138 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.922409 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.922529 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.922547 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hszjg"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.922654 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.923462 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-h2fdd"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.924270 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.925135 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.930567 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.931278 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.932867 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.933665 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.933809 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.934074 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.934253 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.934330 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-b6pj5"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.934416 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.935164 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.936993 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.937465 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.937944 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.938141 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.938140 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.938420 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.938824 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.940580 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lgf4j"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.941272 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.942036 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.942052 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b4swq"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.943413 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.944133 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nr4cq"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.944700 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.945248 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.945909 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.946523 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.946875 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.947679 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.947869 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948055 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948130 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948217 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948318 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948342 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948319 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948443 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948507 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948601 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.948752 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.949953 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.950738 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.950793 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.951429 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952379 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-etcd-serving-ca\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952421 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/102c2351-cd99-4355-b31f-ac1fff221c48-node-pullsecrets\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952446 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-encryption-config\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952467 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-trusted-ca-bundle\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952503 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/102c2351-cd99-4355-b31f-ac1fff221c48-audit-dir\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952526 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-audit-policies\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952554 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxjqs\" (UniqueName: \"kubernetes.io/projected/8f783405-c032-4733-8e15-0dce88717000-kube-api-access-sxjqs\") pod \"dns-operator-744455d44c-b6pj5\" (UID: \"8f783405-c032-4733-8e15-0dce88717000\") " pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952574 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-etcd-client\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952611 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-client-ca\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952646 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-audit\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952696 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-image-import-ca\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952717 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952737 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-audit-dir\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952759 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxkxp\" (UniqueName: \"kubernetes.io/projected/a55eeaca-e582-46e3-8f26-3a72cdcced4f-kube-api-access-kxkxp\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952780 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-encryption-config\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952802 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f783405-c032-4733-8e15-0dce88717000-metrics-tls\") pod \"dns-operator-744455d44c-b6pj5\" (UID: \"8f783405-c032-4733-8e15-0dce88717000\") " pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952828 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55eeaca-e582-46e3-8f26-3a72cdcced4f-serving-cert\") pod 
\"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952852 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952874 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-serving-cert\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952897 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-config\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952917 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmgtj\" (UniqueName: \"kubernetes.io/projected/102c2351-cd99-4355-b31f-ac1fff221c48-kube-api-access-lmgtj\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952941 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-etcd-client\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.952964 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-serving-cert\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.953010 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2x2x\" (UniqueName: \"kubernetes.io/projected/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-kube-api-access-p2x2x\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.953034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 
15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.953058 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-config\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.953291 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.954025 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.955261 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-m787j"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.955872 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.956638 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.957435 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.957659 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.957688 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.957789 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.957916 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.961538 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xzds4"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.965721 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.969181 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.969252 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.969869 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.969896 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.970108 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 
15:41:32.970156 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.970781 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.971304 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.971380 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.971841 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.971903 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.972205 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.972246 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.972734 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.973039 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.973115 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.974424 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.974673 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.974759 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.974805 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.975130 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.975214 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.975620 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xv5bz"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.976335 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 
15:41:32.991766 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.992224 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.992389 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.993249 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.993553 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.993749 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.995052 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.995621 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.997213 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.997875 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.998658 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr"] Mar 20 15:41:32 crc kubenswrapper[4813]: I0320 15:41:32.999216 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:32.999759 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.000681 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.002352 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.002957 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003558 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003737 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003818 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003905 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003957 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003989 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004061 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004071 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.003912 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004179 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004230 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004382 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004465 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004564 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.004684 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.005448 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.006247 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.006560 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.008327 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.008614 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.008769 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.008986 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.009810 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.011031 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.011686 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.012999 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.014523 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.014576 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.015408 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.017573 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.018238 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.020140 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-62k4m"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.020406 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.023147 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crtz5"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.023462 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.023619 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.023659 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.025647 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.025785 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.026008 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.026270 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.026434 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-llrbn"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.026801 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.045411 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.047016 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054124 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-etcd-serving-ca\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054176 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/102c2351-cd99-4355-b31f-ac1fff221c48-node-pullsecrets\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054206 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-encryption-config\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054231 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-trusted-ca-bundle\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054260 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/102c2351-cd99-4355-b31f-ac1fff221c48-audit-dir\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054279 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-audit-policies\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054300 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxjqs\" (UniqueName: \"kubernetes.io/projected/8f783405-c032-4733-8e15-0dce88717000-kube-api-access-sxjqs\") pod \"dns-operator-744455d44c-b6pj5\" (UID: \"8f783405-c032-4733-8e15-0dce88717000\") " pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054318 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-etcd-client\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054350 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-client-ca\") pod 
\"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054371 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-audit\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054416 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-image-import-ca\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054435 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054453 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-audit-dir\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054469 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxkxp\" (UniqueName: \"kubernetes.io/projected/a55eeaca-e582-46e3-8f26-3a72cdcced4f-kube-api-access-kxkxp\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054502 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-encryption-config\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054523 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f783405-c032-4733-8e15-0dce88717000-metrics-tls\") pod \"dns-operator-744455d44c-b6pj5\" (UID: \"8f783405-c032-4733-8e15-0dce88717000\") " pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054545 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55eeaca-e582-46e3-8f26-3a72cdcced4f-serving-cert\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054562 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054581 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-serving-cert\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054600 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-config\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054618 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmgtj\" (UniqueName: \"kubernetes.io/projected/102c2351-cd99-4355-b31f-ac1fff221c48-kube-api-access-lmgtj\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054712 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-etcd-client\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054735 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-serving-cert\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054780 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2x2x\" (UniqueName: \"kubernetes.io/projected/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-kube-api-access-p2x2x\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054804 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.054826 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-config\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.055818 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-config\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.056217 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-audit-dir\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.056224 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-etcd-serving-ca\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.056316 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/102c2351-cd99-4355-b31f-ac1fff221c48-node-pullsecrets\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.057173 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/102c2351-cd99-4355-b31f-ac1fff221c48-audit-dir\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.057814 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-config\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.058018 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.058155 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-audit\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.058512 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-trusted-ca-bundle\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.058970 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.059431 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-client-ca\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.059717 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.059952 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/102c2351-cd99-4355-b31f-ac1fff221c48-image-import-ca\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.060730 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.060853 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.060956 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-etcd-client\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.061055 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.061395 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.062075 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-encryption-config\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.074247 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-encryption-config\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.074354 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.075465 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/102c2351-cd99-4355-b31f-ac1fff221c48-serving-cert\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.075857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-serving-cert\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.075960 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-audit-policies\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.076441 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55eeaca-e582-46e3-8f26-3a72cdcced4f-serving-cert\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.076960 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-b6pj5"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.076971 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f783405-c032-4733-8e15-0dce88717000-metrics-tls\") pod \"dns-operator-744455d44c-b6pj5\" (UID: \"8f783405-c032-4733-8e15-0dce88717000\") " pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.076996 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6bsgz"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.077363 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.077412 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567020-4l7qk"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.077630 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.078989 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.079368 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.080106 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-etcd-client\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.080166 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.080692 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.080877 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.081210 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.082235 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.083993 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.086871 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.091112 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-gsjbb"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.092358 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pc9l6"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.093382 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-m4mtq"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.094312 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.094331 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.096095 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.096230 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-46qtn"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.100555 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hszjg"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.106203 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.108353 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-62k4m"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.111903 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.121547 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.121866 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.123015 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.124332 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xzds4"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.125218 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.126461 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b4swq"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.128207 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lgf4j"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.130044 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-h2fdd"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.131361 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.132389 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-m787j"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.133364 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-dnc2j"] Mar 20 15:41:33 crc 
kubenswrapper[4813]: I0320 15:41:33.134318 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.135143 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.136539 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xv5bz"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.137553 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crtz5"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.138555 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.139539 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567020-4l7qk"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.140599 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.141098 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.141954 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.143936 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vvcfm"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.144646 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.144927 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.145919 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.147917 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.149456 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.150564 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-llrbn"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.151615 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vvcfm"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.152801 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dnc2j"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.154195 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.155724 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.157044 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6bsgz"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.158471 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-m4mtq"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.159506 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.161133 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-r65dl"] Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.161855 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.181914 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.201932 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.221572 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.248677 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.261244 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.281939 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.302254 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.320822 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.340907 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.360940 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.381214 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.401439 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.422080 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.441214 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.461408 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.487738 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.502322 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.522886 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.541016 4813 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.561443 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.582535 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.602671 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.620938 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.642726 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.661607 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.682129 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.702228 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.722368 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.742750 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.761656 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.802207 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.823175 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.842285 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.861876 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.882368 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.901751 4813 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.922202 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.941463 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.961318 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Mar 20 15:41:33 crc kubenswrapper[4813]: I0320 15:41:33.982824 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.002086 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.022333 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.039610 4813 request.go:700] Waited for 1.015629285s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.041618 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.061533 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.091215 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.101368 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.122040 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.141226 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.161476 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.181029 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.202547 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.246174 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Mar 20 15:41:34 crc 
kubenswrapper[4813]: I0320 15:41:34.246842 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.258661 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.262110 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.283022 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.302236 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.322673 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.343172 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.361566 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.391715 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.432829 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxkxp\" (UniqueName: \"kubernetes.io/projected/a55eeaca-e582-46e3-8f26-3a72cdcced4f-kube-api-access-kxkxp\") pod \"controller-manager-879f6c89f-46qtn\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.449335 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxjqs\" (UniqueName: \"kubernetes.io/projected/8f783405-c032-4733-8e15-0dce88717000-kube-api-access-sxjqs\") pod \"dns-operator-744455d44c-b6pj5\" (UID: \"8f783405-c032-4733-8e15-0dce88717000\") " pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.462041 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.468736 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmgtj\" (UniqueName: \"kubernetes.io/projected/102c2351-cd99-4355-b31f-ac1fff221c48-kube-api-access-lmgtj\") pod \"apiserver-76f77b778f-h2fdd\" (UID: \"102c2351-cd99-4355-b31f-ac1fff221c48\") " pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.482443 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.508579 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.522403 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.536551 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2x2x\" (UniqueName: \"kubernetes.io/projected/e53d954e-e40b-4f89-a4fe-82ce7ca92ddb-kube-api-access-p2x2x\") pod \"apiserver-7bbb656c7d-d7f5s\" (UID: \"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.542363 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.561785 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.582532 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.601653 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.607475 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.622170 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.632153 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.642383 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.662693 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.683080 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.702058 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.722001 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.742118 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.748826 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.761360 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.781510 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.789246 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-46qtn"] Mar 20 15:41:34 crc kubenswrapper[4813]: W0320 15:41:34.801682 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda55eeaca_e582_46e3_8f26_3a72cdcced4f.slice/crio-0ca51d7ff11ef4730889fe22cfe28059bb083b6fed29361e1669830ab0fcbf7f WatchSource:0}: Error finding container 0ca51d7ff11ef4730889fe22cfe28059bb083b6fed29361e1669830ab0fcbf7f: Status 404 returned error can't find the container with id 0ca51d7ff11ef4730889fe22cfe28059bb083b6fed29361e1669830ab0fcbf7f Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.801788 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.823715 4813 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.841194 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.861279 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.875051 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-h2fdd"] Mar 20 15:41:34 crc kubenswrapper[4813]: W0320 15:41:34.881673 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod102c2351_cd99_4355_b31f_ac1fff221c48.slice/crio-f67b11a372b2696aa9f75d8f25c7fb3cff0104d46ee14b7e7708fdb9ff8414a0 WatchSource:0}: Error finding container f67b11a372b2696aa9f75d8f25c7fb3cff0104d46ee14b7e7708fdb9ff8414a0: Status 404 returned error can't find the container with id f67b11a372b2696aa9f75d8f25c7fb3cff0104d46ee14b7e7708fdb9ff8414a0 Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.882669 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.902750 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.906467 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-b6pj5"] Mar 20 15:41:34 crc kubenswrapper[4813]: W0320 15:41:34.921995 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f783405_c032_4733_8e15_0dce88717000.slice/crio-9e68a89bb27024162062944ec40198c426fbaa9d7291d02a686c96085c7186a4 WatchSource:0}: Error finding container 9e68a89bb27024162062944ec40198c426fbaa9d7291d02a686c96085c7186a4: 
Status 404 returned error can't find the container with id 9e68a89bb27024162062944ec40198c426fbaa9d7291d02a686c96085c7186a4 Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.922269 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.941822 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.957536 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s"] Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.961434 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Mar 20 15:41:34 crc kubenswrapper[4813]: I0320 15:41:34.981236 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.001605 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.021430 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.040260 4813 request.go:700] Waited for 1.878068405s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.041812 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074459 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-auth-proxy-config\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074518 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdg55\" (UniqueName: \"kubernetes.io/projected/2c9ef6af-6946-4a87-9b7f-1ff56f5ea780-kube-api-access-gdg55\") pod \"cluster-samples-operator-665b6dd947-f528s\" (UID: \"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074549 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2f466308-78b7-43ba-bbaa-8de7afa3d22b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074597 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-certificates\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074617 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2c9ef6af-6946-4a87-9b7f-1ff56f5ea780-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f528s\" (UID: \"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-stats-auth\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074663 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-service-ca\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074684 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074705 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f466308-78b7-43ba-bbaa-8de7afa3d22b-config\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074727 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-audit-policies\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074824 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074881 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-config\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b76d626e-d614-41c8-bcf3-3df8fc57b668-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.074930 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075010 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/17ddafd0-c19d-4c6a-a75f-70b85668c360-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075048 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c099999d-9e76-4625-ad9a-2a9d0d293a63-config\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075076 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/17ddafd0-c19d-4c6a-a75f-70b85668c360-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075100 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thc7r\" (UniqueName: \"kubernetes.io/projected/ef59e32b-dd5c-4beb-b348-67c4847e80ce-kube-api-access-thc7r\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075147 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3cecb27-dbee-4cc4-be8b-79989833677e-serving-cert\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075171 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c099999d-9e76-4625-ad9a-2a9d0d293a63-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075192 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ef59e32b-dd5c-4beb-b348-67c4847e80ce-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075215 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8794\" (UniqueName: \"kubernetes.io/projected/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-kube-api-access-r8794\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075236 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-config\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075260 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-oauth-config\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075283 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075305 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp2sw\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-kube-api-access-vp2sw\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075371 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-service-ca\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075405 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef59e32b-dd5c-4beb-b348-67c4847e80ce-config\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075429 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f466308-78b7-43ba-bbaa-8de7afa3d22b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075492 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075684 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-client\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075725 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075752 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-ca\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075775 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075814 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"registry-tls\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-tls\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.075820 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:35.575803397 +0000 UTC m=+224.998506358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075848 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab628f6-a5aa-4cf7-af1e-774587397924-service-ca-bundle\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-machine-approver-tls\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075889 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-config\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.075919 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076016 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-default-certificate\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076041 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-oauth-serving-cert\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076062 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aecd414-5e15-473d-af30-c967633f216c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076090 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076115 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-trusted-ca-bundle\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076138 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjl6q\" (UniqueName: \"kubernetes.io/projected/c099999d-9e76-4625-ad9a-2a9d0d293a63-kube-api-access-wjl6q\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076160 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b76d626e-d614-41c8-bcf3-3df8fc57b668-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076181 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w4w6\" (UniqueName: \"kubernetes.io/projected/e190ce03-ff83-432a-a092-b7ed2d017aaf-kube-api-access-4w4w6\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076220 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-trusted-ca\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076244 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ef59e32b-dd5c-4beb-b348-67c4847e80ce-images\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076263 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mwpx\" (UniqueName: \"kubernetes.io/projected/56e4637c-fec1-435e-87db-4218601b4c45-kube-api-access-9mwpx\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076333 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076401 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e190ce03-ff83-432a-a092-b7ed2d017aaf-serving-cert\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076456 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aecd414-5e15-473d-af30-c967633f216c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076549 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3aecd414-5e15-473d-af30-c967633f216c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-bound-sa-token\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076692 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b76d626e-d614-41c8-bcf3-3df8fc57b668-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076729 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cswdk\" (UniqueName: \"kubernetes.io/projected/b3cecb27-dbee-4cc4-be8b-79989833677e-kube-api-access-cswdk\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076761 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076802 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7knhq\" (UniqueName: \"kubernetes.io/projected/3ab628f6-a5aa-4cf7-af1e-774587397924-kube-api-access-7knhq\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076864 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2824d\" (UniqueName: \"kubernetes.io/projected/b76d626e-d614-41c8-bcf3-3df8fc57b668-kube-api-access-2824d\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076924 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-client-ca\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.076988 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g85s\" (UniqueName: \"kubernetes.io/projected/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-kube-api-access-8g85s\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.077015 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-config\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.077043 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-serving-cert\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.077065 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56e4637c-fec1-435e-87db-4218601b4c45-audit-dir\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.077090 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-metrics-certs\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.177732 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.177884 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:35.677862249 +0000 UTC m=+225.100565110 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.177923 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.177969 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thc7r\" (UniqueName: \"kubernetes.io/projected/ef59e32b-dd5c-4beb-b348-67c4847e80ce-kube-api-access-thc7r\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.177994 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdkzq\" (UniqueName: \"kubernetes.io/projected/3b3ed106-44fd-4a77-a80e-d5e4faa7350a-kube-api-access-sdkzq\") pod \"migrator-59844c95c7-8h9gl\" (UID: \"3b3ed106-44fd-4a77-a80e-d5e4faa7350a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178017 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-metrics-tls\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178038 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1e06b90-d75c-4c56-a08d-3f71290dc764-srv-cert\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178060 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-config\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178086 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178115 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-oauth-config\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178142 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ef59e32b-dd5c-4beb-b348-67c4847e80ce-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178164 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8794\" (UniqueName: \"kubernetes.io/projected/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-kube-api-access-r8794\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178207 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-srv-cert\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178229 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljdn5\" (UniqueName: \"kubernetes.io/projected/1dce783b-8d1e-4171-8409-be2d773e2ab0-kube-api-access-ljdn5\") pod \"control-plane-machine-set-operator-78cbb6b69f-lzrhk\" (UID: \"1dce783b-8d1e-4171-8409-be2d773e2ab0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178271 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef59e32b-dd5c-4beb-b348-67c4847e80ce-config\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178305 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-csi-data-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178369 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1604f02d-e1f2-4ff6-8964-e038f8a64864-serving-cert\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178397 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178446 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-tls\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.178471 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-machine-approver-tls\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179164 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc82z\" (UniqueName: \"kubernetes.io/projected/df489f65-0ef7-4fd8-a402-9e816dd5620d-kube-api-access-vc82z\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 
15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179195 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-config\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179249 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179392 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482878eb-18a6-4c17-9d27-fc9a5fb650f2-config\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179419 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-oauth-serving-cert\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179439 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3ac360b6-7201-4b96-8aa8-4e6a54d9a918-cert\") pod \"ingress-canary-vvcfm\" (UID: \"3ac360b6-7201-4b96-8aa8-4e6a54d9a918\") " pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179461 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krtnd\" (UniqueName: \"kubernetes.io/projected/e6e86644-c67e-459b-9ad7-9f58409329a8-kube-api-access-krtnd\") pod \"multus-admission-controller-857f4d67dd-62k4m\" (UID: \"e6e86644-c67e-459b-9ad7-9f58409329a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179501 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x8qb\" (UniqueName: \"kubernetes.io/projected/e1e06b90-d75c-4c56-a08d-3f71290dc764-kube-api-access-2x8qb\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179541 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-default-certificate\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179563 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-trusted-ca-bundle\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179611 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d44b082-02e7-4ef0-9242-cbf1b709e91b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5xf4w\" (UID: \"1d44b082-02e7-4ef0-9242-cbf1b709e91b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179651 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b76d626e-d614-41c8-bcf3-3df8fc57b668-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179674 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w4w6\" (UniqueName: \"kubernetes.io/projected/e190ce03-ff83-432a-a092-b7ed2d017aaf-kube-api-access-4w4w6\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179700 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw8vv\" (UniqueName: \"kubernetes.io/projected/8a28763e-1762-4c17-8fb5-4692158a16a8-kube-api-access-zw8vv\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179723 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2f673f37-d358-4733-b2f2-3a7996b7615b-node-bootstrap-token\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179757 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fad111f-321a-4c9b-a945-ef93e6f95efb-serving-cert\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179777 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-socket-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179826 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179848 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179870 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-apiservice-cert\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179889 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-registration-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179911 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e190ce03-ff83-432a-a092-b7ed2d017aaf-serving-cert\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179932 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cswdk\" (UniqueName: \"kubernetes.io/projected/b3cecb27-dbee-4cc4-be8b-79989833677e-kube-api-access-cswdk\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.179968 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7knhq\" (UniqueName: \"kubernetes.io/projected/3ab628f6-a5aa-4cf7-af1e-774587397924-kube-api-access-7knhq\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180017 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2824d\" (UniqueName: 
\"kubernetes.io/projected/b76d626e-d614-41c8-bcf3-3df8fc57b668-kube-api-access-2824d\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180042 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/57db9fef-f17d-4d96-b830-8d79612fdfde-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180065 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g85s\" (UniqueName: \"kubernetes.io/projected/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-kube-api-access-8g85s\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180085 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-client-ca\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180106 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c8878ff8-6310-45c3-adcb-116e36a2dbde-secret-volume\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180128 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-serving-cert\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180150 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df346070-57b9-44a1-9e7d-6383faeea245-serving-cert\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180253 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z87m\" (UniqueName: \"kubernetes.io/projected/2fad111f-321a-4c9b-a945-ef93e6f95efb-kube-api-access-2z87m\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180314 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/aae73cf2-faff-476e-8888-450853cea687-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180342 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mptnc\" (UniqueName: \"kubernetes.io/projected/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-kube-api-access-mptnc\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180368 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmkf9\" (UniqueName: \"kubernetes.io/projected/e4d83813-6127-4a79-ad93-bd5cafe64abd-kube-api-access-cmkf9\") pod \"auto-csr-approver-29567020-4l7qk\" (UID: \"e4d83813-6127-4a79-ad93-bd5cafe64abd\") " pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180401 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/aae73cf2-faff-476e-8888-450853cea687-images\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180421 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df489f65-0ef7-4fd8-a402-9e816dd5620d-trusted-ca\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180447 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-certificates\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180470 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-service-ca\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180510 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180537 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7zcm\" (UniqueName: 
\"kubernetes.io/projected/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-kube-api-access-v7zcm\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqw9g\" (UniqueName: \"kubernetes.io/projected/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-kube-api-access-qqw9g\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180597 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxlz5\" (UniqueName: \"kubernetes.io/projected/df346070-57b9-44a1-9e7d-6383faeea245-kube-api-access-vxlz5\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180607 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-config\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180636 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/17ddafd0-c19d-4c6a-a75f-70b85668c360-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180660 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6e86644-c67e-459b-9ad7-9f58409329a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-62k4m\" (UID: \"e6e86644-c67e-459b-9ad7-9f58409329a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180680 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df489f65-0ef7-4fd8-a402-9e816dd5620d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180768 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef59e32b-dd5c-4beb-b348-67c4847e80ce-config\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.180700 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-signing-cabundle\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.183508 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-client-ca\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184588 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df346070-57b9-44a1-9e7d-6383faeea245-trusted-ca\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184639 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/17ddafd0-c19d-4c6a-a75f-70b85668c360-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184666 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fad111f-321a-4c9b-a945-ef93e6f95efb-config\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184719 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c099999d-9e76-4625-ad9a-2a9d0d293a63-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184742 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3cecb27-dbee-4cc4-be8b-79989833677e-serving-cert\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184774 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-service-ca-bundle\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.184987 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-service-ca\") pod \"etcd-operator-b45778765-pc9l6\" (UID: 
\"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.185047 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-config\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.185302 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.185700 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.185897 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp2sw\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-kube-api-access-vp2sw\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.185946 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-mountpoint-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186174 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-service-ca\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186227 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f466308-78b7-43ba-bbaa-8de7afa3d22b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186587 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186640 
4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a28763e-1762-4c17-8fb5-4692158a16a8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186672 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf82x\" (UniqueName: \"kubernetes.io/projected/3ac360b6-7201-4b96-8aa8-4e6a54d9a918-kube-api-access-hf82x\") pod \"ingress-canary-vvcfm\" (UID: \"3ac360b6-7201-4b96-8aa8-4e6a54d9a918\") " pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186722 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186768 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-webhook-cert\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186801 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-client\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186828 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-signing-key\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.186855 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxkhp\" (UniqueName: \"kubernetes.io/projected/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-kube-api-access-kxkhp\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187014 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187090 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/3ab628f6-a5aa-4cf7-af1e-774587397924-service-ca-bundle\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187129 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-ca\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.187165 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:35.687149166 +0000 UTC m=+225.109852117 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187195 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187382 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-oauth-serving-cert\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187513 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1e06b90-d75c-4c56-a08d-3f71290dc764-profile-collector-cert\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187544 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlkx5\" (UniqueName: \"kubernetes.io/projected/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-kube-api-access-qlkx5\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.187899 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-service-ca\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " 
pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188025 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v77r\" (UniqueName: \"kubernetes.io/projected/57db9fef-f17d-4d96-b830-8d79612fdfde-kube-api-access-4v77r\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188122 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aecd414-5e15-473d-af30-c967633f216c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188177 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c8878ff8-6310-45c3-adcb-116e36a2dbde-config-volume\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188203 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1604f02d-e1f2-4ff6-8964-e038f8a64864-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188232 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/982e4ba6-24e6-4165-9bae-c805735078e0-serving-cert\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188289 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x6tn\" (UniqueName: \"kubernetes.io/projected/3088bb81-3f95-4383-bbd5-ef89df01a20f-kube-api-access-8x6tn\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188315 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjl6q\" (UniqueName: \"kubernetes.io/projected/c099999d-9e76-4625-ad9a-2a9d0d293a63-kube-api-access-wjl6q\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188343 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gpxq\" (UniqueName: \"kubernetes.io/projected/1be7a1f1-df63-450b-aa07-3b5f76e9b6f7-kube-api-access-5gpxq\") pod 
\"downloads-7954f5f757-m787j\" (UID: \"1be7a1f1-df63-450b-aa07-3b5f76e9b6f7\") " pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188367 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xggm6\" (UniqueName: \"kubernetes.io/projected/1604f02d-e1f2-4ff6-8964-e038f8a64864-kube-api-access-xggm6\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188313 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-ca\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188441 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5mvt\" (UniqueName: \"kubernetes.io/projected/2f673f37-d358-4733-b2f2-3a7996b7615b-kube-api-access-l5mvt\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188468 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ef59e32b-dd5c-4beb-b348-67c4847e80ce-images\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188513 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mwpx\" (UniqueName: \"kubernetes.io/projected/56e4637c-fec1-435e-87db-4218601b4c45-kube-api-access-9mwpx\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188542 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-config\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.188773 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-trusted-ca-bundle\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.189291 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-trusted-ca\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.189394 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3aecd414-5e15-473d-af30-c967633f216c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.189421 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/482878eb-18a6-4c17-9d27-fc9a5fb650f2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.190746 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-trusted-ca\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.190790 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-certificates\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.191462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ef59e32b-dd5c-4beb-b348-67c4847e80ce-images\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.191817 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3cecb27-dbee-4cc4-be8b-79989833677e-serving-cert\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192192 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aecd414-5e15-473d-af30-c967633f216c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192504 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192672 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b3cecb27-dbee-4cc4-be8b-79989833677e-etcd-client\") pod 
\"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192767 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-bound-sa-token\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192817 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b76d626e-d614-41c8-bcf3-3df8fc57b668-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192843 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aecd414-5e15-473d-af30-c967633f216c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192881 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192928 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-config\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192965 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-config-volume\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.192988 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193013 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1dce783b-8d1e-4171-8409-be2d773e2ab0-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-lzrhk\" (UID: \"1dce783b-8d1e-4171-8409-be2d773e2ab0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193049 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aae73cf2-faff-476e-8888-450853cea687-proxy-tls\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193070 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193093 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f89z\" (UniqueName: \"kubernetes.io/projected/1d44b082-02e7-4ef0-9242-cbf1b709e91b-kube-api-access-2f89z\") pod \"package-server-manager-789f6589d5-5xf4w\" (UID: \"1d44b082-02e7-4ef0-9242-cbf1b709e91b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193101 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab628f6-a5aa-4cf7-af1e-774587397924-service-ca-bundle\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193118 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-profile-collector-cert\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193199 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-metrics-certs\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193542 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aecd414-5e15-473d-af30-c967633f216c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193589 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56e4637c-fec1-435e-87db-4218601b4c45-audit-dir\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: 
\"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193633 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f466308-78b7-43ba-bbaa-8de7afa3d22b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2f673f37-d358-4733-b2f2-3a7996b7615b-certs\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193698 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xcz8\" (UniqueName: \"kubernetes.io/projected/aae73cf2-faff-476e-8888-450853cea687-kube-api-access-4xcz8\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193735 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-auth-proxy-config\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193760 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdg55\" (UniqueName: \"kubernetes.io/projected/2c9ef6af-6946-4a87-9b7f-1ff56f5ea780-kube-api-access-gdg55\") pod \"cluster-samples-operator-665b6dd947-f528s\" (UID: \"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193795 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/17ddafd0-c19d-4c6a-a75f-70b85668c360-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193831 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193861 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch6cv\" (UniqueName: \"kubernetes.io/projected/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-kube-api-access-ch6cv\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " 
pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193886 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/482878eb-18a6-4c17-9d27-fc9a5fb650f2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.193976 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2c9ef6af-6946-4a87-9b7f-1ff56f5ea780-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f528s\" (UID: \"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194009 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-stats-auth\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194039 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f466308-78b7-43ba-bbaa-8de7afa3d22b-config\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194038 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56e4637c-fec1-435e-87db-4218601b4c45-audit-dir\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194110 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-audit-policies\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194138 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194165 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/57db9fef-f17d-4d96-b830-8d79612fdfde-proxy-tls\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194190 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b76d626e-d614-41c8-bcf3-3df8fc57b668-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194215 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194436 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a28763e-1762-4c17-8fb5-4692158a16a8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194492 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-auth-proxy-config\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194500 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-config\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194541 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df489f65-0ef7-4fd8-a402-9e816dd5620d-metrics-tls\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194568 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkhcw\" (UniqueName: \"kubernetes.io/projected/982e4ba6-24e6-4165-9bae-c805735078e0-kube-api-access-dkhcw\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194592 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-plugins-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194616 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df346070-57b9-44a1-9e7d-6383faeea245-config\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194847 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c099999d-9e76-4625-ad9a-2a9d0d293a63-config\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194877 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg6lq\" (UniqueName: \"kubernetes.io/projected/c8878ff8-6310-45c3-adcb-116e36a2dbde-kube-api-access-kg6lq\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.194902 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-tmpfs\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.195227 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-config\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.195569 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c099999d-9e76-4625-ad9a-2a9d0d293a63-config\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.196337 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-audit-policies\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.196410 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f466308-78b7-43ba-bbaa-8de7afa3d22b-config\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.197332 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-config\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.197401 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b76d626e-d614-41c8-bcf3-3df8fc57b668-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.200654 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e190ce03-ff83-432a-a092-b7ed2d017aaf-serving-cert\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.201300 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.201340 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-metrics-certs\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.201730 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-serving-cert\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.201833 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-default-certificate\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.201889 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-tls\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202106 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b76d626e-d614-41c8-bcf3-3df8fc57b668-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202153 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" 
(UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-oauth-config\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202261 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3ab628f6-a5aa-4cf7-af1e-774587397924-stats-auth\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202449 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-machine-approver-tls\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202700 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.202863 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ef59e32b-dd5c-4beb-b348-67c4847e80ce-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.203181 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.203227 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f466308-78b7-43ba-bbaa-8de7afa3d22b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: 
\"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.203397 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c099999d-9e76-4625-ad9a-2a9d0d293a63-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.203448 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2c9ef6af-6946-4a87-9b7f-1ff56f5ea780-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f528s\" (UID: \"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.204307 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.211301 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/17ddafd0-c19d-4c6a-a75f-70b85668c360-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.222240 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thc7r\" (UniqueName: \"kubernetes.io/projected/ef59e32b-dd5c-4beb-b348-67c4847e80ce-kube-api-access-thc7r\") pod \"machine-api-operator-5694c8668f-hszjg\" (UID: \"ef59e32b-dd5c-4beb-b348-67c4847e80ce\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.235298 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8794\" (UniqueName: \"kubernetes.io/projected/2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1-kube-api-access-r8794\") pod \"machine-approver-56656f9798-5n7dz\" (UID: \"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.252251 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.258686 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7knhq\" (UniqueName: \"kubernetes.io/projected/3ab628f6-a5aa-4cf7-af1e-774587397924-kube-api-access-7knhq\") pod \"router-default-5444994796-nr4cq\" (UID: \"3ab628f6-a5aa-4cf7-af1e-774587397924\") " pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.260459 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.282187 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2824d\" (UniqueName: \"kubernetes.io/projected/b76d626e-d614-41c8-bcf3-3df8fc57b668-kube-api-access-2824d\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.294384 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g85s\" (UniqueName: \"kubernetes.io/projected/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-kube-api-access-8g85s\") pod \"console-f9d7485db-gsjbb\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.295440 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.295884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-mountpoint-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.296009 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:35.795981064 +0000 UTC m=+225.218683935 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296222 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296529 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a28763e-1762-4c17-8fb5-4692158a16a8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.296684 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:35.796668561 +0000 UTC m=+225.219371402 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296729 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf82x\" (UniqueName: \"kubernetes.io/projected/3ac360b6-7201-4b96-8aa8-4e6a54d9a918-kube-api-access-hf82x\") pod \"ingress-canary-vvcfm\" (UID: \"3ac360b6-7201-4b96-8aa8-4e6a54d9a918\") " pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296756 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-webhook-cert\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296773 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-signing-key\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296803 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxkhp\" (UniqueName: 
\"kubernetes.io/projected/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-kube-api-access-kxkhp\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296824 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1e06b90-d75c-4c56-a08d-3f71290dc764-profile-collector-cert\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296838 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlkx5\" (UniqueName: \"kubernetes.io/projected/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-kube-api-access-qlkx5\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296855 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v77r\" (UniqueName: \"kubernetes.io/projected/57db9fef-f17d-4d96-b830-8d79612fdfde-kube-api-access-4v77r\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296894 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c8878ff8-6310-45c3-adcb-116e36a2dbde-config-volume\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296911 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1604f02d-e1f2-4ff6-8964-e038f8a64864-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296926 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/982e4ba6-24e6-4165-9bae-c805735078e0-serving-cert\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296945 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x6tn\" (UniqueName: \"kubernetes.io/projected/3088bb81-3f95-4383-bbd5-ef89df01a20f-kube-api-access-8x6tn\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296966 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gpxq\" (UniqueName: 
\"kubernetes.io/projected/1be7a1f1-df63-450b-aa07-3b5f76e9b6f7-kube-api-access-5gpxq\") pod \"downloads-7954f5f757-m787j\" (UID: \"1be7a1f1-df63-450b-aa07-3b5f76e9b6f7\") " pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296983 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xggm6\" (UniqueName: \"kubernetes.io/projected/1604f02d-e1f2-4ff6-8964-e038f8a64864-kube-api-access-xggm6\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297006 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5mvt\" (UniqueName: \"kubernetes.io/projected/2f673f37-d358-4733-b2f2-3a7996b7615b-kube-api-access-l5mvt\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297029 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-config\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297077 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/482878eb-18a6-4c17-9d27-fc9a5fb650f2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297108 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297126 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-config-volume\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297141 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1dce783b-8d1e-4171-8409-be2d773e2ab0-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-lzrhk\" (UID: \"1dce783b-8d1e-4171-8409-be2d773e2ab0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297187 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aae73cf2-faff-476e-8888-450853cea687-proxy-tls\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297210 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297229 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f89z\" (UniqueName: \"kubernetes.io/projected/1d44b082-02e7-4ef0-9242-cbf1b709e91b-kube-api-access-2f89z\") pod \"package-server-manager-789f6589d5-5xf4w\" (UID: \"1d44b082-02e7-4ef0-9242-cbf1b709e91b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297245 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-profile-collector-cert\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297264 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2f673f37-d358-4733-b2f2-3a7996b7615b-certs\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297281 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xcz8\" (UniqueName: \"kubernetes.io/projected/aae73cf2-faff-476e-8888-450853cea687-kube-api-access-4xcz8\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297304 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch6cv\" (UniqueName: \"kubernetes.io/projected/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-kube-api-access-ch6cv\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297321 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/482878eb-18a6-4c17-9d27-fc9a5fb650f2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297340 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/57db9fef-f17d-4d96-b830-8d79612fdfde-proxy-tls\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297357 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a28763e-1762-4c17-8fb5-4692158a16a8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297374 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df489f65-0ef7-4fd8-a402-9e816dd5620d-metrics-tls\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297389 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkhcw\" (UniqueName: \"kubernetes.io/projected/982e4ba6-24e6-4165-9bae-c805735078e0-kube-api-access-dkhcw\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297404 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-plugins-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297419 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df346070-57b9-44a1-9e7d-6383faeea245-config\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297437 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg6lq\" (UniqueName: \"kubernetes.io/projected/c8878ff8-6310-45c3-adcb-116e36a2dbde-kube-api-access-kg6lq\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297504 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-tmpfs\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297521 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297545 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdkzq\" (UniqueName: \"kubernetes.io/projected/3b3ed106-44fd-4a77-a80e-d5e4faa7350a-kube-api-access-sdkzq\") pod \"migrator-59844c95c7-8h9gl\" (UID: \"3b3ed106-44fd-4a77-a80e-d5e4faa7350a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297562 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-metrics-tls\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297576 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1e06b90-d75c-4c56-a08d-3f71290dc764-srv-cert\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297595 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297609 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-srv-cert\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297625 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljdn5\" (UniqueName: \"kubernetes.io/projected/1dce783b-8d1e-4171-8409-be2d773e2ab0-kube-api-access-ljdn5\") pod \"control-plane-machine-set-operator-78cbb6b69f-lzrhk\" (UID: \"1dce783b-8d1e-4171-8409-be2d773e2ab0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297645 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-csi-data-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297660 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1604f02d-e1f2-4ff6-8964-e038f8a64864-serving-cert\") pod 
\"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297678 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc82z\" (UniqueName: \"kubernetes.io/projected/df489f65-0ef7-4fd8-a402-9e816dd5620d-kube-api-access-vc82z\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297698 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482878eb-18a6-4c17-9d27-fc9a5fb650f2-config\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3ac360b6-7201-4b96-8aa8-4e6a54d9a918-cert\") pod \"ingress-canary-vvcfm\" (UID: \"3ac360b6-7201-4b96-8aa8-4e6a54d9a918\") " pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297730 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krtnd\" (UniqueName: \"kubernetes.io/projected/e6e86644-c67e-459b-9ad7-9f58409329a8-kube-api-access-krtnd\") pod \"multus-admission-controller-857f4d67dd-62k4m\" (UID: \"e6e86644-c67e-459b-9ad7-9f58409329a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297746 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x8qb\" (UniqueName: \"kubernetes.io/projected/e1e06b90-d75c-4c56-a08d-3f71290dc764-kube-api-access-2x8qb\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297766 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d44b082-02e7-4ef0-9242-cbf1b709e91b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5xf4w\" (UID: \"1d44b082-02e7-4ef0-9242-cbf1b709e91b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw8vv\" (UniqueName: \"kubernetes.io/projected/8a28763e-1762-4c17-8fb5-4692158a16a8-kube-api-access-zw8vv\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297805 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2f673f37-d358-4733-b2f2-3a7996b7615b-node-bootstrap-token\") pod \"machine-config-server-r65dl\" (UID: 
\"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297821 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fad111f-321a-4c9b-a945-ef93e6f95efb-serving-cert\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297835 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-socket-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297853 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-apiservice-cert\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297867 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-registration-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297889 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/57db9fef-f17d-4d96-b830-8d79612fdfde-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297904 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c8878ff8-6310-45c3-adcb-116e36a2dbde-secret-volume\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297920 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df346070-57b9-44a1-9e7d-6383faeea245-serving-cert\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297937 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z87m\" (UniqueName: \"kubernetes.io/projected/2fad111f-321a-4c9b-a945-ef93e6f95efb-kube-api-access-2z87m\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297952 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aae73cf2-faff-476e-8888-450853cea687-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297969 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mptnc\" (UniqueName: \"kubernetes.io/projected/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-kube-api-access-mptnc\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.297984 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmkf9\" (UniqueName: \"kubernetes.io/projected/e4d83813-6127-4a79-ad93-bd5cafe64abd-kube-api-access-cmkf9\") pod \"auto-csr-approver-29567020-4l7qk\" (UID: \"e4d83813-6127-4a79-ad93-bd5cafe64abd\") " pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298001 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/aae73cf2-faff-476e-8888-450853cea687-images\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298016 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df489f65-0ef7-4fd8-a402-9e816dd5620d-trusted-ca\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298047 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7zcm\" (UniqueName: \"kubernetes.io/projected/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-kube-api-access-v7zcm\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298067 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqw9g\" (UniqueName: \"kubernetes.io/projected/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-kube-api-access-qqw9g\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298084 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxlz5\" (UniqueName: \"kubernetes.io/projected/df346070-57b9-44a1-9e7d-6383faeea245-kube-api-access-vxlz5\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298102 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/e6e86644-c67e-459b-9ad7-9f58409329a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-62k4m\" (UID: \"e6e86644-c67e-459b-9ad7-9f58409329a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298118 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df489f65-0ef7-4fd8-a402-9e816dd5620d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298133 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-signing-cabundle\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298148 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df346070-57b9-44a1-9e7d-6383faeea245-trusted-ca\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298172 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fad111f-321a-4c9b-a945-ef93e6f95efb-config\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298195 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-service-ca-bundle\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.298973 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-service-ca-bundle\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.299000 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-tmpfs\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.299616 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.296089 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-mountpoint-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.299698 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.299753 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-config-volume\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.303585 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a28763e-1762-4c17-8fb5-4692158a16a8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.305071 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.305582 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-metrics-tls\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.305981 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-plugins-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.306234 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-socket-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.306321 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-registration-dir\") pod 
\"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.306513 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-csi-data-dir\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.307320 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/982e4ba6-24e6-4165-9bae-c805735078e0-config\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.307576 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aae73cf2-faff-476e-8888-450853cea687-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.308146 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df489f65-0ef7-4fd8-a402-9e816dd5620d-metrics-tls\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.308341 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309015 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482878eb-18a6-4c17-9d27-fc9a5fb650f2-config\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309188 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c8878ff8-6310-45c3-adcb-116e36a2dbde-config-volume\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/57db9fef-f17d-4d96-b830-8d79612fdfde-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309291 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-signing-cabundle\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309565 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/aae73cf2-faff-476e-8888-450853cea687-images\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309760 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1604f02d-e1f2-4ff6-8964-e038f8a64864-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.309810 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fad111f-321a-4c9b-a945-ef93e6f95efb-config\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.310246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1e06b90-d75c-4c56-a08d-3f71290dc764-profile-collector-cert\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.310135 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1e06b90-d75c-4c56-a08d-3f71290dc764-srv-cert\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.310645 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-srv-cert\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.310765 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2f673f37-d358-4733-b2f2-3a7996b7615b-node-bootstrap-token\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.310959 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df346070-57b9-44a1-9e7d-6383faeea245-config\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " 
pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.311166 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df346070-57b9-44a1-9e7d-6383faeea245-trusted-ca\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.311470 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e6e86644-c67e-459b-9ad7-9f58409329a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-62k4m\" (UID: \"e6e86644-c67e-459b-9ad7-9f58409329a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.311889 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df489f65-0ef7-4fd8-a402-9e816dd5620d-trusted-ca\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.312107 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-profile-collector-cert\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.312128 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c8878ff8-6310-45c3-adcb-116e36a2dbde-secret-volume\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.312300 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3ac360b6-7201-4b96-8aa8-4e6a54d9a918-cert\") pod \"ingress-canary-vvcfm\" (UID: \"3ac360b6-7201-4b96-8aa8-4e6a54d9a918\") " pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.312729 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2f673f37-d358-4733-b2f2-3a7996b7615b-certs\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.313427 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1604f02d-e1f2-4ff6-8964-e038f8a64864-serving-cert\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.313806 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fad111f-321a-4c9b-a945-ef93e6f95efb-serving-cert\") pod 
\"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.313839 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-apiservice-cert\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314281 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a28763e-1762-4c17-8fb5-4692158a16a8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314307 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-signing-key\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314285 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aae73cf2-faff-476e-8888-450853cea687-proxy-tls\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314352 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/982e4ba6-24e6-4165-9bae-c805735078e0-serving-cert\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314430 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314517 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-webhook-cert\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314582 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1dce783b-8d1e-4171-8409-be2d773e2ab0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lzrhk\" (UID: \"1dce783b-8d1e-4171-8409-be2d773e2ab0\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.314848 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/57db9fef-f17d-4d96-b830-8d79612fdfde-proxy-tls\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.317824 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/482878eb-18a6-4c17-9d27-fc9a5fb650f2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.318082 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d44b082-02e7-4ef0-9242-cbf1b709e91b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5xf4w\" (UID: \"1d44b082-02e7-4ef0-9242-cbf1b709e91b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.319268 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df346070-57b9-44a1-9e7d-6383faeea245-serving-cert\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.322373 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w4w6\" (UniqueName: \"kubernetes.io/projected/e190ce03-ff83-432a-a092-b7ed2d017aaf-kube-api-access-4w4w6\") pod \"route-controller-manager-6576b87f9c-zcdj4\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.335448 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cswdk\" (UniqueName: \"kubernetes.io/projected/b3cecb27-dbee-4cc4-be8b-79989833677e-kube-api-access-cswdk\") pod \"etcd-operator-b45778765-pc9l6\" (UID: \"b3cecb27-dbee-4cc4-be8b-79989833677e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.359216 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp2sw\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-kube-api-access-vp2sw\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.398661 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.399064 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2f466308-78b7-43ba-bbaa-8de7afa3d22b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-m6czc\" (UID: \"2f466308-78b7-43ba-bbaa-8de7afa3d22b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.399178 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:35.899159593 +0000 UTC m=+225.321862444 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.415734 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjl6q\" (UniqueName: \"kubernetes.io/projected/c099999d-9e76-4625-ad9a-2a9d0d293a63-kube-api-access-wjl6q\") pod \"openshift-apiserver-operator-796bbdcf4f-x2rnw\" (UID: \"c099999d-9e76-4625-ad9a-2a9d0d293a63\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.439911 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mwpx\" (UniqueName: \"kubernetes.io/projected/56e4637c-fec1-435e-87db-4218601b4c45-kube-api-access-9mwpx\") pod \"oauth-openshift-558db77b4-lgf4j\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.451139 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.453954 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" event={"ID":"8f783405-c032-4733-8e15-0dce88717000","Type":"ContainerStarted","Data":"5bdb620c67a1eb6421d67c4c8cbd7c5e29f79c7c4e5adea64e667caf2ba03a6c"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.453995 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" event={"ID":"8f783405-c032-4733-8e15-0dce88717000","Type":"ContainerStarted","Data":"c1f63397589885b378fa6859641d11763d182dd09acefee848bd869e35672e29"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.454009 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" event={"ID":"8f783405-c032-4733-8e15-0dce88717000","Type":"ContainerStarted","Data":"9e68a89bb27024162062944ec40198c426fbaa9d7291d02a686c96085c7186a4"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.455081 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3aecd414-5e15-473d-af30-c967633f216c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqkfl\" (UID: \"3aecd414-5e15-473d-af30-c967633f216c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.459439 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.462802 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" event={"ID":"a55eeaca-e582-46e3-8f26-3a72cdcced4f","Type":"ContainerStarted","Data":"04c8d1b1b0554baeed53a1603b54e133f53aad86281d466046736258c307a70b"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.462871 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" event={"ID":"a55eeaca-e582-46e3-8f26-3a72cdcced4f","Type":"ContainerStarted","Data":"0ca51d7ff11ef4730889fe22cfe28059bb083b6fed29361e1669830ab0fcbf7f"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.463015 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.464331 4813 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-46qtn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.464393 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" podUID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.465473 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nr4cq" 
event={"ID":"3ab628f6-a5aa-4cf7-af1e-774587397924","Type":"ContainerStarted","Data":"0c18efcc4d0a1453dc4db88411bc5e8ff5085e0c1cdebd45a856b3b3e3a66e26"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.465551 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nr4cq" event={"ID":"3ab628f6-a5aa-4cf7-af1e-774587397924","Type":"ContainerStarted","Data":"b363867bb0c479204c1e99b801fb1b31169b5e959cb5be8af838bb3214ae489d"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.466856 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" event={"ID":"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1","Type":"ContainerStarted","Data":"ee397fda6c032020d409a780abeb336c114ccb1a82ec86f145813ca6faa19702"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.469011 4813 generic.go:334] "Generic (PLEG): container finished" podID="e53d954e-e40b-4f89-a4fe-82ce7ca92ddb" containerID="9a1d41c5dd78fa23ec96224763fb98c9c37d8103cbc8c2da5145da1f424e0cdc" exitCode=0 Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.469104 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" event={"ID":"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb","Type":"ContainerDied","Data":"9a1d41c5dd78fa23ec96224763fb98c9c37d8103cbc8c2da5145da1f424e0cdc"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.469183 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" event={"ID":"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb","Type":"ContainerStarted","Data":"daf41220a85f17740ce9faa63eaddabc86f0716203a70f10f40bdf63928ffbe9"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.475370 4813 generic.go:334] "Generic (PLEG): container finished" podID="102c2351-cd99-4355-b31f-ac1fff221c48" containerID="429e4f466cd3f5af2240b5726697ba690ce4acee17e7ec34791a492c41bd1663" exitCode=0 Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.475416 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" event={"ID":"102c2351-cd99-4355-b31f-ac1fff221c48","Type":"ContainerDied","Data":"429e4f466cd3f5af2240b5726697ba690ce4acee17e7ec34791a492c41bd1663"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.475446 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" event={"ID":"102c2351-cd99-4355-b31f-ac1fff221c48","Type":"ContainerStarted","Data":"f67b11a372b2696aa9f75d8f25c7fb3cff0104d46ee14b7e7708fdb9ff8414a0"} Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.481212 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-bound-sa-token\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.491032 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.495215 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b76d626e-d614-41c8-bcf3-3df8fc57b668-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6rhtw\" (UID: \"b76d626e-d614-41c8-bcf3-3df8fc57b668\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.501752 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.503372 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.003352867 +0000 UTC m=+225.426055818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.510760 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.516278 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdg55\" (UniqueName: \"kubernetes.io/projected/2c9ef6af-6946-4a87-9b7f-1ff56f5ea780-kube-api-access-gdg55\") pod \"cluster-samples-operator-665b6dd947-f528s\" (UID: \"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.523698 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.541309 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdkzq\" (UniqueName: \"kubernetes.io/projected/3b3ed106-44fd-4a77-a80e-d5e4faa7350a-kube-api-access-sdkzq\") pod \"migrator-59844c95c7-8h9gl\" (UID: \"3b3ed106-44fd-4a77-a80e-d5e4faa7350a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.541927 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.568970 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.570351 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf82x\" (UniqueName: \"kubernetes.io/projected/3ac360b6-7201-4b96-8aa8-4e6a54d9a918-kube-api-access-hf82x\") pod \"ingress-canary-vvcfm\" (UID: \"3ac360b6-7201-4b96-8aa8-4e6a54d9a918\") " pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.574009 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.579832 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.581413 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkhcw\" (UniqueName: \"kubernetes.io/projected/982e4ba6-24e6-4165-9bae-c805735078e0-kube-api-access-dkhcw\") pod \"authentication-operator-69f744f599-llrbn\" (UID: \"982e4ba6-24e6-4165-9bae-c805735078e0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.586972 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.603008 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.603849 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.10382275 +0000 UTC m=+225.526525591 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.604417 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.605161 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.105135152 +0000 UTC m=+225.527838253 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.605517 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxkhp\" (UniqueName: \"kubernetes.io/projected/fd7a9d1a-a89e-4230-b7cc-60aac33d83da-kube-api-access-kxkhp\") pod \"catalog-operator-68c6474976-tlhjn\" (UID: \"fd7a9d1a-a89e-4230-b7cc-60aac33d83da\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.617196 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlkx5\" (UniqueName: \"kubernetes.io/projected/71b1bc2c-4da8-49e1-b3d4-d27901ea92bf-kube-api-access-qlkx5\") pod \"packageserver-d55dfcdfc-dlbqb\" (UID: \"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.629064 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.639244 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg6lq\" (UniqueName: \"kubernetes.io/projected/c8878ff8-6310-45c3-adcb-116e36a2dbde-kube-api-access-kg6lq\") pod \"collect-profiles-29567010-62qb5\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.661903 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7zcm\" (UniqueName: \"kubernetes.io/projected/e41db8d8-3508-40dc-bca8-ecd9ce41a6e3-kube-api-access-v7zcm\") pod \"csi-hostpathplugin-m4mtq\" (UID: \"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3\") " pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.674990 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xggm6\" (UniqueName: \"kubernetes.io/projected/1604f02d-e1f2-4ff6-8964-e038f8a64864-kube-api-access-xggm6\") pod \"openshift-config-operator-7777fb866f-xzds4\" (UID: \"1604f02d-e1f2-4ff6-8964-e038f8a64864\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.705216 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.705556 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.205531453 +0000 UTC m=+225.628234294 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.706163 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5mvt\" (UniqueName: \"kubernetes.io/projected/2f673f37-d358-4733-b2f2-3a7996b7615b-kube-api-access-l5mvt\") pod \"machine-config-server-r65dl\" (UID: \"2f673f37-d358-4733-b2f2-3a7996b7615b\") " pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.720808 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.727759 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw8vv\" (UniqueName: \"kubernetes.io/projected/8a28763e-1762-4c17-8fb5-4692158a16a8-kube-api-access-zw8vv\") pod \"openshift-controller-manager-operator-756b6f6bc6-87msr\" (UID: \"8a28763e-1762-4c17-8fb5-4692158a16a8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.728007 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.731681 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-gsjbb"] Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.738050 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krtnd\" (UniqueName: \"kubernetes.io/projected/e6e86644-c67e-459b-9ad7-9f58409329a8-kube-api-access-krtnd\") pod \"multus-admission-controller-857f4d67dd-62k4m\" (UID: \"e6e86644-c67e-459b-9ad7-9f58409329a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.754737 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.759193 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f89z\" (UniqueName: \"kubernetes.io/projected/1d44b082-02e7-4ef0-9242-cbf1b709e91b-kube-api-access-2f89z\") pod \"package-server-manager-789f6589d5-5xf4w\" (UID: \"1d44b082-02e7-4ef0-9242-cbf1b709e91b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.768736 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.781237 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x8qb\" (UniqueName: \"kubernetes.io/projected/e1e06b90-d75c-4c56-a08d-3f71290dc764-kube-api-access-2x8qb\") pod \"olm-operator-6b444d44fb-fqp6p\" (UID: \"e1e06b90-d75c-4c56-a08d-3f71290dc764\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.797456 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.804129 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df489f65-0ef7-4fd8-a402-9e816dd5620d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.807846 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.808252 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.308240861 +0000 UTC m=+225.730943702 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.829534 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gpxq\" (UniqueName: \"kubernetes.io/projected/1be7a1f1-df63-450b-aa07-3b5f76e9b6f7-kube-api-access-5gpxq\") pod \"downloads-7954f5f757-m787j\" (UID: \"1be7a1f1-df63-450b-aa07-3b5f76e9b6f7\") " pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.829763 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vvcfm" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.833953 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-r65dl" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.850741 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw"] Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.854494 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x6tn\" (UniqueName: \"kubernetes.io/projected/3088bb81-3f95-4383-bbd5-ef89df01a20f-kube-api-access-8x6tn\") pod \"marketplace-operator-79b997595-crtz5\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.878115 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v77r\" (UniqueName: \"kubernetes.io/projected/57db9fef-f17d-4d96-b830-8d79612fdfde-kube-api-access-4v77r\") pod \"machine-config-controller-84d6567774-62nz7\" (UID: \"57db9fef-f17d-4d96-b830-8d79612fdfde\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.883012 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lgf4j"] Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.884350 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxlz5\" (UniqueName: \"kubernetes.io/projected/df346070-57b9-44a1-9e7d-6383faeea245-kube-api-access-vxlz5\") pod \"console-operator-58897d9998-xv5bz\" (UID: \"df346070-57b9-44a1-9e7d-6383faeea245\") " pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.893716 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4"] Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.900564 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.907974 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hszjg"] Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.908435 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.908565 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.40853811 +0000 UTC m=+225.831240951 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.908822 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.908974 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:35 crc kubenswrapper[4813]: E0320 15:41:35.909138 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.409130854 +0000 UTC m=+225.831833695 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.910098 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/482878eb-18a6-4c17-9d27-fc9a5fb650f2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5qnvb\" (UID: \"482878eb-18a6-4c17-9d27-fc9a5fb650f2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.916796 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.925110 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc82z\" (UniqueName: \"kubernetes.io/projected/df489f65-0ef7-4fd8-a402-9e816dd5620d-kube-api-access-vc82z\") pod \"ingress-operator-5b745b69d9-d4pzs\" (UID: \"df489f65-0ef7-4fd8-a402-9e816dd5620d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.934747 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.947531 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch6cv\" (UniqueName: \"kubernetes.io/projected/dcd2f0d3-cdac-480d-91cd-88c8a78005ba-kube-api-access-ch6cv\") pod \"dns-default-dnc2j\" (UID: \"dcd2f0d3-cdac-480d-91cd-88c8a78005ba\") " pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.961116 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.966010 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pc9l6"] Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.981418 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqw9g\" (UniqueName: \"kubernetes.io/projected/e6ff8412-c6e3-43a6-92e3-1644d33ee12f-kube-api-access-qqw9g\") pod \"kube-storage-version-migrator-operator-b67b599dd-bfhd6\" (UID: \"e6ff8412-c6e3-43a6-92e3-1644d33ee12f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.991031 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.997433 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z87m\" (UniqueName: \"kubernetes.io/projected/2fad111f-321a-4c9b-a945-ef93e6f95efb-kube-api-access-2z87m\") pod \"service-ca-operator-777779d784-4b2zc\" (UID: \"2fad111f-321a-4c9b-a945-ef93e6f95efb\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:35 crc kubenswrapper[4813]: I0320 15:41:35.998559 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.001246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mptnc\" (UniqueName: \"kubernetes.io/projected/750fbd37-7d78-4f5d-b78f-f8f2f09d703d-kube-api-access-mptnc\") pod \"service-ca-9c57cc56f-6bsgz\" (UID: \"750fbd37-7d78-4f5d-b78f-f8f2f09d703d\") " pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.007475 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.009354 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.009642 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.509621737 +0000 UTC m=+225.932324578 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.012630 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.020202 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljdn5\" (UniqueName: \"kubernetes.io/projected/1dce783b-8d1e-4171-8409-be2d773e2ab0-kube-api-access-ljdn5\") pod \"control-plane-machine-set-operator-78cbb6b69f-lzrhk\" (UID: \"1dce783b-8d1e-4171-8409-be2d773e2ab0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.035964 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.039637 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmkf9\" (UniqueName: \"kubernetes.io/projected/e4d83813-6127-4a79-ad93-bd5cafe64abd-kube-api-access-cmkf9\") pod \"auto-csr-approver-29567020-4l7qk\" (UID: \"e4d83813-6127-4a79-ad93-bd5cafe64abd\") " pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.042896 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.048206 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.062153 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.077927 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xcz8\" (UniqueName: \"kubernetes.io/projected/aae73cf2-faff-476e-8888-450853cea687-kube-api-access-4xcz8\") pod \"machine-config-operator-74547568cd-pvn9m\" (UID: \"aae73cf2-faff-476e-8888-450853cea687\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.089969 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:41:36 crc kubenswrapper[4813]: W0320 15:41:36.092787 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3cecb27_dbee_4cc4_be8b_79989833677e.slice/crio-620c9b590a5976de72b1461885157100c8ae9bc96468703f0faa26623fe39fea WatchSource:0}: Error finding container 620c9b590a5976de72b1461885157100c8ae9bc96468703f0faa26623fe39fea: Status 404 returned error can't find the container with id 620c9b590a5976de72b1461885157100c8ae9bc96468703f0faa26623fe39fea Mar 20 15:41:36 crc kubenswrapper[4813]: W0320 15:41:36.093015 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef59e32b_dd5c_4beb_b348_67c4847e80ce.slice/crio-a00d796115756cce52ac8d9a2eb7ec87ed1091fdb350131e7ac6a452133c70f3 WatchSource:0}: Error finding container a00d796115756cce52ac8d9a2eb7ec87ed1091fdb350131e7ac6a452133c70f3: Status 404 returned error can't find the container with id a00d796115756cce52ac8d9a2eb7ec87ed1091fdb350131e7ac6a452133c70f3 Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.110808 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.111090 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.611074424 +0000 UTC m=+226.033777335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.118781 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.193733 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.211881 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.212508 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.71247444 +0000 UTC m=+226.135177271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.242742 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.261697 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.271813 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.310794 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s"] Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.315569 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.315922 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.815911035 +0000 UTC m=+226.238613876 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.404659 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:36 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:36 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:36 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.404705 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.416101 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.416223 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.916201314 +0000 UTC m=+226.338904155 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.416318 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.416661 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:36.916652165 +0000 UTC m=+226.339355006 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.498025 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" event={"ID":"e190ce03-ff83-432a-a092-b7ed2d017aaf","Type":"ContainerStarted","Data":"2095fd3cb97b33265e767196f881d137a84387741fcb30cf16902e0d8d0611b5"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.516878 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.517213 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.017198569 +0000 UTC m=+226.439901410 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.539761 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" event={"ID":"e53d954e-e40b-4f89-a4fe-82ce7ca92ddb","Type":"ContainerStarted","Data":"36ad42dd3b6e2487a6183823e33dba191069ae67308b6d1d07d6898e25c9cecf"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.540494 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl"] Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.568668 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw"] Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.579122 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" event={"ID":"102c2351-cd99-4355-b31f-ac1fff221c48","Type":"ContainerStarted","Data":"38cb9652c84eb5061bfb83d01e0626f42e7477c0e59ff459c1f426807bad589d"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.597231 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" event={"ID":"ef59e32b-dd5c-4beb-b348-67c4847e80ce","Type":"ContainerStarted","Data":"a00d796115756cce52ac8d9a2eb7ec87ed1091fdb350131e7ac6a452133c70f3"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.598654 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" event={"ID":"56e4637c-fec1-435e-87db-4218601b4c45","Type":"ContainerStarted","Data":"a5dabc0e88d57b1e3fc2a4eacdec2873fdaf9b3e30e2232b0d091998fa5b95d3"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.616700 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-r65dl" event={"ID":"2f673f37-d358-4733-b2f2-3a7996b7615b","Type":"ContainerStarted","Data":"f7673031de3e4b38d570f3df45de84bb4dcd8a05c76e49b5f26d303f8345b5c2"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.620227 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.620793 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.120780559 +0000 UTC m=+226.543483400 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.622734 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" event={"ID":"b76d626e-d614-41c8-bcf3-3df8fc57b668","Type":"ContainerStarted","Data":"50ed458f951be1d9f4d242c63521530a3cca7fe929d86f830827e25f1907910f"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.639770 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gsjbb" event={"ID":"97d209ef-db8b-4a75-bd06-bf1aea3a81dc","Type":"ContainerStarted","Data":"464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.639812 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gsjbb" event={"ID":"97d209ef-db8b-4a75-bd06-bf1aea3a81dc","Type":"ContainerStarted","Data":"970cf1ad540f5bdc5e2594cc352df34c50b92fd323081229a01f2bde0c5cdea8"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.643117 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" event={"ID":"b3cecb27-dbee-4cc4-be8b-79989833677e","Type":"ContainerStarted","Data":"620c9b590a5976de72b1461885157100c8ae9bc96468703f0faa26623fe39fea"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.651563 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" event={"ID":"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1","Type":"ContainerStarted","Data":"caefddfbc4726d608eedc9d06e8ef42b64e1033ae140f38ea45bdf79c04cb158"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.651610 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" event={"ID":"2e4ed81d-1d4c-4bd2-8ceb-4e959b8a7fb1","Type":"ContainerStarted","Data":"46ce0e9fead6eebe219a9a9375c5e236fe5682f778bc6f072a6785c79fcc8790"} Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.663538 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.721252 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.729737 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.229715499 +0000 UTC m=+226.652418340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.839121 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.839771 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.339758686 +0000 UTC m=+226.762461527 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.941031 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:36 crc kubenswrapper[4813]: E0320 15:41:36.942052 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.441813048 +0000 UTC m=+226.864515889 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:36 crc kubenswrapper[4813]: I0320 15:41:36.965915 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-b6pj5" podStartSLOduration=160.965902028 podStartE2EDuration="2m40.965902028s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:36.917105122 +0000 UTC m=+226.339807963" watchObservedRunningTime="2026-03-20 15:41:36.965902028 +0000 UTC m=+226.388604869" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.042694 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.043152 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.543140522 +0000 UTC m=+226.965843363 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.058030 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-r65dl" podStartSLOduration=4.058012466 podStartE2EDuration="4.058012466s" podCreationTimestamp="2026-03-20 15:41:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.017188576 +0000 UTC m=+226.439891417" watchObservedRunningTime="2026-03-20 15:41:37.058012466 +0000 UTC m=+226.480715307" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.143584 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.143664 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.643643735 +0000 UTC m=+227.066346566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.144038 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.144339 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.644326892 +0000 UTC m=+227.067029733 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.196498 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-gsjbb" podStartSLOduration=161.19646157 podStartE2EDuration="2m41.19646157s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.136789097 +0000 UTC m=+226.559491938" watchObservedRunningTime="2026-03-20 15:41:37.19646157 +0000 UTC m=+226.619164411" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.246380 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.246997 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.746983089 +0000 UTC m=+227.169685930 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.260022 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" podStartSLOduration=161.260002578 podStartE2EDuration="2m41.260002578s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.258265795 +0000 UTC m=+226.680968636" watchObservedRunningTime="2026-03-20 15:41:37.260002578 +0000 UTC m=+226.682705419" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.338688 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" podStartSLOduration=161.338670276 podStartE2EDuration="2m41.338670276s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.337962599 +0000 UTC m=+226.760665440" watchObservedRunningTime="2026-03-20 15:41:37.338670276 +0000 UTC m=+226.761373117" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.348994 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.349321 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.849309227 +0000 UTC m=+227.272012068 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.418978 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5n7dz" podStartSLOduration=162.418961464 podStartE2EDuration="2m42.418961464s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.418245777 +0000 UTC m=+226.840948618" watchObservedRunningTime="2026-03-20 15:41:37.418961464 +0000 UTC m=+226.841664305" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.450924 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.451308 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:37.951293296 +0000 UTC m=+227.373996137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.472096 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34112: no serving certificate available for the kubelet" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.552144 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.552526 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.052514747 +0000 UTC m=+227.475217588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.582090 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34120: no serving certificate available for the kubelet" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.648421 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nr4cq" podStartSLOduration=161.648404767 podStartE2EDuration="2m41.648404767s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.647969067 +0000 UTC m=+227.070671908" watchObservedRunningTime="2026-03-20 15:41:37.648404767 +0000 UTC m=+227.071107608" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.658043 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.658418 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.158405823 +0000 UTC m=+227.581108664 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.664426 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" event={"ID":"e190ce03-ff83-432a-a092-b7ed2d017aaf","Type":"ContainerStarted","Data":"8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.666647 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.669546 4813 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-zcdj4 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.669601 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" podUID="e190ce03-ff83-432a-a092-b7ed2d017aaf" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.669936 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34130: no serving certificate available for the kubelet" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.675548 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" event={"ID":"102c2351-cd99-4355-b31f-ac1fff221c48","Type":"ContainerStarted","Data":"653c8907a81738dc5403ea0bd9d8bdb1aadd0538f6694c1b94187256aecfd98e"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.676851 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-r65dl" event={"ID":"2f673f37-d358-4733-b2f2-3a7996b7615b","Type":"ContainerStarted","Data":"0388e2b9d442c8702c19129adbaa0826a24ac31f93f7e760ddd0d52fd7e9484e"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.677915 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" event={"ID":"b3cecb27-dbee-4cc4-be8b-79989833677e","Type":"ContainerStarted","Data":"5eac3ed608bebbd945bfb4d47e45114820efd00cb8e1dbc4616d28387c6d9873"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.679434 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" event={"ID":"3b3ed106-44fd-4a77-a80e-d5e4faa7350a","Type":"ContainerStarted","Data":"e9b3255cd53ba12e5142fa1fa9e4f02e35bce67a6b9ce7b235a098462c48f164"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.679470 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" 
event={"ID":"3b3ed106-44fd-4a77-a80e-d5e4faa7350a","Type":"ContainerStarted","Data":"68821689db669ac7bf4a21664e56d4b998cd214d3d2ded83bb28a7bf56dd3a3d"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.684660 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" event={"ID":"ef59e32b-dd5c-4beb-b348-67c4847e80ce","Type":"ContainerStarted","Data":"e65203fb0e251261192073b68e74d4a032b62caf84a5187083761f00e81a29b6"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.684714 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" event={"ID":"ef59e32b-dd5c-4beb-b348-67c4847e80ce","Type":"ContainerStarted","Data":"ff5fe99fa503b00d688980e6e68d7175e416338a70794411c02a76d3158f3227"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.695032 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" event={"ID":"56e4637c-fec1-435e-87db-4218601b4c45","Type":"ContainerStarted","Data":"959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.695606 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.697500 4813 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lgf4j container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body= Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.697627 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" podUID="56e4637c-fec1-435e-87db-4218601b4c45" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.699145 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" event={"ID":"c099999d-9e76-4625-ad9a-2a9d0d293a63","Type":"ContainerStarted","Data":"f8291419c10ebb569780bd2daf92d39f1dfa3186e6f5b7c90e9538507fa53376"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.699213 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" event={"ID":"c099999d-9e76-4625-ad9a-2a9d0d293a63","Type":"ContainerStarted","Data":"d6aef919f2ac3c64249bdadae7ebdcbbb677400d14557aa00a07a679fab7ad3c"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.705449 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" event={"ID":"b76d626e-d614-41c8-bcf3-3df8fc57b668","Type":"ContainerStarted","Data":"c32ffa43f7322176de9f8b0718a6bd7452915afef74d73cb2558f53aff962535"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.719859 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" event={"ID":"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780","Type":"ContainerStarted","Data":"f499ef1495ef8c16170c6a04513ff66e7275d4c91a928c084e7d2be0da74a353"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 
15:41:37.719921 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" event={"ID":"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780","Type":"ContainerStarted","Data":"715c74b108471b13d205c35f097cd33eeaffe7ec5ddbe847acac46652f2e6723"} Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.759667 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.762770 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.26275503 +0000 UTC m=+227.685457871 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.768046 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:37 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:37 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:37 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.768084 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.780582 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34134: no serving certificate available for the kubelet" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.864372 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.864570 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.364539205 +0000 UTC m=+227.787242056 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.866416 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.866853 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.366839522 +0000 UTC m=+227.789542363 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.893188 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34142: no serving certificate available for the kubelet" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.943234 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-x2rnw" podStartSLOduration=162.943216034 podStartE2EDuration="2m42.943216034s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.937066813 +0000 UTC m=+227.359769664" watchObservedRunningTime="2026-03-20 15:41:37.943216034 +0000 UTC m=+227.365918875" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.968423 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:37 crc kubenswrapper[4813]: E0320 15:41:37.968783 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.46876825 +0000 UTC m=+227.891471091 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.980542 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" podStartSLOduration=161.980522999 podStartE2EDuration="2m41.980522999s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:37.979301419 +0000 UTC m=+227.402004260" watchObservedRunningTime="2026-03-20 15:41:37.980522999 +0000 UTC m=+227.403225840" Mar 20 15:41:37 crc kubenswrapper[4813]: I0320 15:41:37.992725 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34158: no serving certificate available for the kubelet" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.036382 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" podStartSLOduration=163.036368907 podStartE2EDuration="2m43.036368907s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.034929982 +0000 UTC m=+227.457632833" watchObservedRunningTime="2026-03-20 15:41:38.036368907 +0000 UTC m=+227.459071748" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.053325 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-hszjg" podStartSLOduration=162.053307933 podStartE2EDuration="2m42.053307933s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.052862732 +0000 UTC m=+227.475565573" watchObservedRunningTime="2026-03-20 15:41:38.053307933 +0000 UTC m=+227.476010774" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.071164 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.071505 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.571462698 +0000 UTC m=+227.994165539 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.075801 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34174: no serving certificate available for the kubelet" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.094810 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rhtw" podStartSLOduration=162.0947918 podStartE2EDuration="2m42.0947918s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.094449091 +0000 UTC m=+227.517151932" watchObservedRunningTime="2026-03-20 15:41:38.0947918 +0000 UTC m=+227.517494641" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.103968 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.106040 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.141094 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vvcfm"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.149858 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-pc9l6" podStartSLOduration=162.149842259 podStartE2EDuration="2m42.149842259s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.148858775 +0000 UTC m=+227.571561616" watchObservedRunningTime="2026-03-20 15:41:38.149842259 +0000 UTC m=+227.572545090" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.163920 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.171874 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.172110 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.672073464 +0000 UTC m=+228.094776305 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.176331 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" podStartSLOduration=163.176313548 podStartE2EDuration="2m43.176313548s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.175940469 +0000 UTC m=+227.598643310" watchObservedRunningTime="2026-03-20 15:41:38.176313548 +0000 UTC m=+227.599016389" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.196952 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34176: no serving certificate available for the kubelet" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.217188 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.228279 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-llrbn"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.274033 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-m4mtq"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.275066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.275532 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.775510549 +0000 UTC m=+228.198213390 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.279925 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:38 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:38 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:38 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.279986 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.365352 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.381526 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.382044 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.88202736 +0000 UTC m=+228.304730201 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.394958 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dnc2j"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.413978 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.416257 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.449907 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p"] Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.454697 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1e06b90_d75c_4c56_a08d_3f71290dc764.slice/crio-5f195b70208a73b38b00c0002686963d30f84893cd04282d245182576028bbaa WatchSource:0}: Error finding container 5f195b70208a73b38b00c0002686963d30f84893cd04282d245182576028bbaa: Status 404 returned error can't find the container with id 5f195b70208a73b38b00c0002686963d30f84893cd04282d245182576028bbaa Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.456291 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.482307 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-m787j"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.483021 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.483442 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:38.983426096 +0000 UTC m=+228.406128937 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.536003 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6bsgz"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.536731 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567020-4l7qk"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.538642 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xv5bz"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.544950 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xzds4"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.584208 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.584620 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.084587916 +0000 UTC m=+228.507290767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.584697 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.587592 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.087577309 +0000 UTC m=+228.510280160 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.589441 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.602501 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crtz5"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.613226 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.623890 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.638935 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.638988 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.659082 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-62k4m"] Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.686214 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.686554 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.186538935 +0000 UTC m=+228.609241766 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.700371 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf346070_57b9_44a1_9e7d_6383faeea245.slice/crio-742e4d1f275be9d183e8d68feb3bd3aa5045b387e3e272ed886850c01a19ee26 WatchSource:0}: Error finding container 742e4d1f275be9d183e8d68feb3bd3aa5045b387e3e272ed886850c01a19ee26: Status 404 returned error can't find the container with id 742e4d1f275be9d183e8d68feb3bd3aa5045b387e3e272ed886850c01a19ee26 Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.717005 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4d83813_6127_4a79_ad93_bd5cafe64abd.slice/crio-2d647d24952c798ff2878fc9bf8974892d134d8924cc3363028ac84d476d58ed WatchSource:0}: Error finding container 2d647d24952c798ff2878fc9bf8974892d134d8924cc3363028ac84d476d58ed: Status 404 returned error can't find the container with id 2d647d24952c798ff2878fc9bf8974892d134d8924cc3363028ac84d476d58ed Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.721689 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.721701 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fad111f_321a_4c9b_a945_ef93e6f95efb.slice/crio-ab4c891c2c652df67d81d231ce7e946aa4c79afa9998fa7cf7a7d4f0a2a34264 WatchSource:0}: Error finding container ab4c891c2c652df67d81d231ce7e946aa4c79afa9998fa7cf7a7d4f0a2a34264: Status 404 returned error can't find the container with id ab4c891c2c652df67d81d231ce7e946aa4c79afa9998fa7cf7a7d4f0a2a34264 Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.723562 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1604f02d_e1f2_4ff6_8964_e038f8a64864.slice/crio-9c6f06fef842304c61b32c487c1559264e3e009f833f299d12e1b966084aff69 WatchSource:0}: Error finding container 9c6f06fef842304c61b32c487c1559264e3e009f833f299d12e1b966084aff69: Status 404 returned error can't find the container with id 9c6f06fef842304c61b32c487c1559264e3e009f833f299d12e1b966084aff69 Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.750253 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w"] Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.764981 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3088bb81_3f95_4383_bbd5_ef89df01a20f.slice/crio-49ab4f97d30558b888bfe2192cf038288fefa3b23912c8809195e1cc65e05656 WatchSource:0}: Error finding container 49ab4f97d30558b888bfe2192cf038288fefa3b23912c8809195e1cc65e05656: Status 404 returned error can't find the container with id 49ab4f97d30558b888bfe2192cf038288fefa3b23912c8809195e1cc65e05656 Mar 20 15:41:38 crc kubenswrapper[4813]: 
I0320 15:41:38.767240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" event={"ID":"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3","Type":"ContainerStarted","Data":"83610254eb38c05bac5b88f81efb17bf2742cb1798c9ae0f8be1e4d6ced3213e"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.775828 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" event={"ID":"e1e06b90-d75c-4c56-a08d-3f71290dc764","Type":"ContainerStarted","Data":"5f195b70208a73b38b00c0002686963d30f84893cd04282d245182576028bbaa"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.779843 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" event={"ID":"e4d83813-6127-4a79-ad93-bd5cafe64abd","Type":"ContainerStarted","Data":"2d647d24952c798ff2878fc9bf8974892d134d8924cc3363028ac84d476d58ed"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.781096 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" event={"ID":"df346070-57b9-44a1-9e7d-6383faeea245","Type":"ContainerStarted","Data":"742e4d1f275be9d183e8d68feb3bd3aa5045b387e3e272ed886850c01a19ee26"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.786969 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.787318 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.287306445 +0000 UTC m=+228.710009286 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.801385 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" event={"ID":"57db9fef-f17d-4d96-b830-8d79612fdfde","Type":"ContainerStarted","Data":"66f28a538921ce356e3d837c6e77e5fd95fc4aa6f94eea04fd28658342713cbb"} Mar 20 15:41:38 crc kubenswrapper[4813]: W0320 15:41:38.815806 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod482878eb_18a6_4c17_9d27_fc9a5fb650f2.slice/crio-8be054f62167beaff5f14007587fb42a74d98ab463c766353ba7e5c2e0ff22c9 WatchSource:0}: Error finding container 8be054f62167beaff5f14007587fb42a74d98ab463c766353ba7e5c2e0ff22c9: Status 404 returned error can't find the container with id 8be054f62167beaff5f14007587fb42a74d98ab463c766353ba7e5c2e0ff22c9 Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.816175 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" event={"ID":"e6ff8412-c6e3-43a6-92e3-1644d33ee12f","Type":"ContainerStarted","Data":"bf693f952c8f14192ec58de5dd7be2a3912eee27a6658dc4a9b290f143b06e48"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.843777 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vvcfm" event={"ID":"3ac360b6-7201-4b96-8aa8-4e6a54d9a918","Type":"ContainerStarted","Data":"00def57f9fb05b1685295080dfea4bdb88d429c29968a145555fe6c613143a19"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.844284 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vvcfm" event={"ID":"3ac360b6-7201-4b96-8aa8-4e6a54d9a918","Type":"ContainerStarted","Data":"63f1d15fc0ba7e6716e9df6c706a3a73619a1776d50b751a5044d13290f06577"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.846931 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-m787j" event={"ID":"1be7a1f1-df63-450b-aa07-3b5f76e9b6f7","Type":"ContainerStarted","Data":"621f54444fd65a92337911f4b52ea3a864c017f7d147c574f175cda272476bca"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.853745 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" event={"ID":"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf","Type":"ContainerStarted","Data":"32d32c26bd147fa80f27eafd8afd26b4df9b5fdd4185cb5faeb9dcea69a1e06c"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.859245 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.859459 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" 
event={"ID":"71b1bc2c-4da8-49e1-b3d4-d27901ea92bf","Type":"ContainerStarted","Data":"6a2f50cbf4efd47bcc34d89266efdbadf730bc528ecf627465ec6562cf707c87"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.857631 4813 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-dlbqb container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.859842 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" podUID="71b1bc2c-4da8-49e1-b3d4-d27901ea92bf" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.866375 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vvcfm" podStartSLOduration=6.866353983 podStartE2EDuration="6.866353983s" podCreationTimestamp="2026-03-20 15:41:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.862680993 +0000 UTC m=+228.285383834" watchObservedRunningTime="2026-03-20 15:41:38.866353983 +0000 UTC m=+228.289056824" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.869503 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" event={"ID":"3aecd414-5e15-473d-af30-c967633f216c","Type":"ContainerStarted","Data":"678dc53631288332d0332271d6ce504020dc1dba3ff39fae2d8acabb0a4ce47f"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.880105 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" event={"ID":"3b3ed106-44fd-4a77-a80e-d5e4faa7350a","Type":"ContainerStarted","Data":"c3884f9ab714439ba57468269237e263c4c9e62bae0ef8c7149e4ac296fd4c08"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.881244 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34184: no serving certificate available for the kubelet" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.885966 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dnc2j" event={"ID":"dcd2f0d3-cdac-480d-91cd-88c8a78005ba","Type":"ContainerStarted","Data":"c9831318d432cd91ea424860e49b20c4119b5b2577ffdae5cec68ad9777a2458"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.887517 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.887766 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.387749377 +0000 UTC m=+228.810452208 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.888177 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.889072 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.389059249 +0000 UTC m=+228.811762090 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.903790 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" podStartSLOduration=162.9037769 podStartE2EDuration="2m42.9037769s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.901925885 +0000 UTC m=+228.324628736" watchObservedRunningTime="2026-03-20 15:41:38.9037769 +0000 UTC m=+228.326479741" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.911680 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" event={"ID":"750fbd37-7d78-4f5d-b78f-f8f2f09d703d","Type":"ContainerStarted","Data":"8a9342b2e7dc9e8a42772c29fb48f8e5a8a013bbc1394e403df22d47efb8ec5f"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.927256 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8h9gl" podStartSLOduration=162.927236505 podStartE2EDuration="2m42.927236505s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.922344205 +0000 UTC m=+228.345047036" watchObservedRunningTime="2026-03-20 15:41:38.927236505 +0000 UTC m=+228.349939346" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.934473 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" 
event={"ID":"c8878ff8-6310-45c3-adcb-116e36a2dbde","Type":"ContainerStarted","Data":"d1b0e96f44f5cc0fa2eacab059b56866043aadc93aea98dc70b7a30ef5639e85"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.934524 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" event={"ID":"c8878ff8-6310-45c3-adcb-116e36a2dbde","Type":"ContainerStarted","Data":"5f91945f69f54ae738de1cb86d53f5d4ae07b916657cfb061d8282b65f62ce91"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.938128 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" event={"ID":"1dce783b-8d1e-4171-8409-be2d773e2ab0","Type":"ContainerStarted","Data":"203b8ed58d6ce3e70beab5e3dda73d1790cffb42eb4e64ad6fec8d2bc365442f"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.939910 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" event={"ID":"982e4ba6-24e6-4165-9bae-c805735078e0","Type":"ContainerStarted","Data":"b82843c6030a20349f6ab87085002203498e6ea230d711720dceb964e480f312"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.942935 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" event={"ID":"aae73cf2-faff-476e-8888-450853cea687","Type":"ContainerStarted","Data":"1e902973312e00171bc41d038e6cc1d1521b1315e04ba8d85197c94d73054a7e"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.943786 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" event={"ID":"2f466308-78b7-43ba-bbaa-8de7afa3d22b","Type":"ContainerStarted","Data":"4c5ad2141ae5dd81bdf1c8da021fa8c08de0342ea9b99e7333a308699a141ca5"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.975355 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" podStartSLOduration=163.975338994 podStartE2EDuration="2m43.975338994s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:38.973155681 +0000 UTC m=+228.395858522" watchObservedRunningTime="2026-03-20 15:41:38.975338994 +0000 UTC m=+228.398041825" Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.976133 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" event={"ID":"2c9ef6af-6946-4a87-9b7f-1ff56f5ea780","Type":"ContainerStarted","Data":"eb096772de41a7b9d616425944b4818dd82e07a0126bd0ce24bc564906bfebed"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.984573 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" event={"ID":"fd7a9d1a-a89e-4230-b7cc-60aac33d83da","Type":"ContainerStarted","Data":"9a99f599ac01f4492513572360e62f3f68e4be60b87b9b6ecc2e279755ecfe2e"} Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.995878 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:38 crc kubenswrapper[4813]: E0320 15:41:38.997018 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.496992895 +0000 UTC m=+228.919695796 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:38 crc kubenswrapper[4813]: I0320 15:41:38.999745 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.008275 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" podStartSLOduration=164.008257981 podStartE2EDuration="2m44.008257981s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:39.001643399 +0000 UTC m=+228.424346240" watchObservedRunningTime="2026-03-20 15:41:39.008257981 +0000 UTC m=+228.430960822" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.076035 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f528s" podStartSLOduration=164.076017962 podStartE2EDuration="2m44.076017962s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:39.073940971 +0000 UTC m=+228.496643812" watchObservedRunningTime="2026-03-20 15:41:39.076017962 +0000 UTC m=+228.498720803" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.097943 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.106950 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.6069356 +0000 UTC m=+229.029638441 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.199496 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.199860 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.699846307 +0000 UTC m=+229.122549138 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.267121 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:39 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:39 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:39 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.267171 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.301116 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.301465 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.801449948 +0000 UTC m=+229.224152799 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.345694 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.403610 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.403921 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:39.90390679 +0000 UTC m=+229.326609631 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.504609 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.505262 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.005231053 +0000 UTC m=+229.427934044 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.607766 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.608724 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.608928 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.609112 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.109083139 +0000 UTC m=+229.531785980 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.609268 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.609652 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.109635343 +0000 UTC m=+229.532338184 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.635754 4813 patch_prober.go:28] interesting pod/apiserver-76f77b778f-h2fdd container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]log ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]etcd ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/start-apiserver-admission-initializer ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/generic-apiserver-start-informers ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/max-in-flight-filter ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/storage-object-count-tracker-hook ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/image.openshift.io-apiserver-caches ok Mar 20 15:41:39 crc kubenswrapper[4813]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Mar 20 15:41:39 crc kubenswrapper[4813]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/project.openshift.io-projectcache ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-startinformers ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-restmapperupdater ok Mar 20 15:41:39 crc kubenswrapper[4813]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Mar 20 15:41:39 crc kubenswrapper[4813]: livez check failed Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.635806 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" podUID="102c2351-cd99-4355-b31f-ac1fff221c48" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.714349 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.714691 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.214668147 +0000 UTC m=+229.637370988 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.748939 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.749795 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.761643 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.816594 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.817654 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.317642951 +0000 UTC m=+229.740345792 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.918097 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.918280 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.418244128 +0000 UTC m=+229.840946959 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.918385 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:39 crc kubenswrapper[4813]: E0320 15:41:39.918709 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.418702039 +0000 UTC m=+229.841404880 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.991299 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-llrbn" event={"ID":"982e4ba6-24e6-4165-9bae-c805735078e0","Type":"ContainerStarted","Data":"a7bfacb1f33a8cd0d8415b752f71e1e0ccab4d5f615240ddd0a0d2fabd558329"} Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.993286 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dnc2j" event={"ID":"dcd2f0d3-cdac-480d-91cd-88c8a78005ba","Type":"ContainerStarted","Data":"00a32c252117a2e07f78bf62e21ee30d1198d568d052da73d037d619a76b2166"} Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.994413 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" event={"ID":"2f466308-78b7-43ba-bbaa-8de7afa3d22b","Type":"ContainerStarted","Data":"5b7c2bab3356c34121e11a5b65a234e80eaf6984f30a67d00ce27c35e793244c"} Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.997098 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" event={"ID":"1dce783b-8d1e-4171-8409-be2d773e2ab0","Type":"ContainerStarted","Data":"9d17ff001474d3ac8878f9433bae8f788950dfc07fd7b8a60d079eae29e6b0e5"} Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.998698 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" event={"ID":"2fad111f-321a-4c9b-a945-ef93e6f95efb","Type":"ContainerStarted","Data":"2a2e0e63dcd8a4b193464f497dc294262df5c44e071d83961d84ca2eadef1c68"} Mar 20 15:41:39 crc kubenswrapper[4813]: I0320 15:41:39.998725 4813 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" event={"ID":"2fad111f-321a-4c9b-a945-ef93e6f95efb","Type":"ContainerStarted","Data":"ab4c891c2c652df67d81d231ce7e946aa4c79afa9998fa7cf7a7d4f0a2a34264"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.001349 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" event={"ID":"482878eb-18a6-4c17-9d27-fc9a5fb650f2","Type":"ContainerStarted","Data":"9d64896fc2af89d7a107b969a778e3d799693d1a9151a3281260dba40b118fc5"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.001390 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" event={"ID":"482878eb-18a6-4c17-9d27-fc9a5fb650f2","Type":"ContainerStarted","Data":"8be054f62167beaff5f14007587fb42a74d98ab463c766353ba7e5c2e0ff22c9"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.003941 4813 generic.go:334] "Generic (PLEG): container finished" podID="1604f02d-e1f2-4ff6-8964-e038f8a64864" containerID="33c552ee98576bbf0cd6d12f1a0bd9d60197aa80c00463603e8b56560f0fa14b" exitCode=0 Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.004015 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" event={"ID":"1604f02d-e1f2-4ff6-8964-e038f8a64864","Type":"ContainerDied","Data":"33c552ee98576bbf0cd6d12f1a0bd9d60197aa80c00463603e8b56560f0fa14b"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.004043 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" event={"ID":"1604f02d-e1f2-4ff6-8964-e038f8a64864","Type":"ContainerStarted","Data":"9c6f06fef842304c61b32c487c1559264e3e009f833f299d12e1b966084aff69"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.006333 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" event={"ID":"df489f65-0ef7-4fd8-a402-9e816dd5620d","Type":"ContainerStarted","Data":"11644b05d445bed4cf4acfb28d5b5e14a08ac829af9d38b5b1d28116d1411d3e"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.006361 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" event={"ID":"df489f65-0ef7-4fd8-a402-9e816dd5620d","Type":"ContainerStarted","Data":"36932e33dcdab022f710e500b474d751388bf70a4e81707efc2557df32e87e87"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.007807 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" event={"ID":"8a28763e-1762-4c17-8fb5-4692158a16a8","Type":"ContainerStarted","Data":"9beec6ca512c86a7ed899ae91d5700e2f6aa6d84fa2ce3f5e24a1cd340707f70"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.007833 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" event={"ID":"8a28763e-1762-4c17-8fb5-4692158a16a8","Type":"ContainerStarted","Data":"86770fc3d6a234bdceb75fb5227f14810258644f406cef3d010e176005ee7eaa"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.009377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-m787j" 
event={"ID":"1be7a1f1-df63-450b-aa07-3b5f76e9b6f7","Type":"ContainerStarted","Data":"cdd9a52d25a16214f60ca1e639fc57a682d7f6c9dc7085e0f9e2cf85b7cef9fd"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.010043 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.011599 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" event={"ID":"3aecd414-5e15-473d-af30-c967633f216c","Type":"ContainerStarted","Data":"c0f0a38d2bfcc3b123743ac394c38011dbc71b06d1d5a4ffed25228b4d514a35"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.012038 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-m787j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.012101 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m787j" podUID="1be7a1f1-df63-450b-aa07-3b5f76e9b6f7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.013058 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" event={"ID":"750fbd37-7d78-4f5d-b78f-f8f2f09d703d","Type":"ContainerStarted","Data":"d485b17e57b477ac526aabb4c96551ae9a023f21be5e86044fd0f81eb9e2fc3e"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.014578 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" event={"ID":"df346070-57b9-44a1-9e7d-6383faeea245","Type":"ContainerStarted","Data":"1bbb681f0af92aa4acd814aa5193e984ac8f8a8a47fa190ebe4ad4cd13b33f39"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.015339 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.016623 4813 patch_prober.go:28] interesting pod/console-operator-58897d9998-xv5bz container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.016872 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" podUID="df346070-57b9-44a1-9e7d-6383faeea245" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.019151 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m6czc" podStartSLOduration=164.019141091 podStartE2EDuration="2m44.019141091s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.01747917 +0000 UTC m=+229.440182011" 
watchObservedRunningTime="2026-03-20 15:41:40.019141091 +0000 UTC m=+229.441843922" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.019815 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.020020 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.519992372 +0000 UTC m=+229.942695223 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.020068 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.020972 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.520961525 +0000 UTC m=+229.943664366 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.024800 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" event={"ID":"1d44b082-02e7-4ef0-9242-cbf1b709e91b","Type":"ContainerStarted","Data":"070e2b6d87500505e9c6c1cf908cbf13250ad94da74121b8739d8bfb7a9fb03d"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.024860 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" event={"ID":"1d44b082-02e7-4ef0-9242-cbf1b709e91b","Type":"ContainerStarted","Data":"f206f056729ec65aa46cb9ba6ce32a7e166880d5e80412c95e0229185505ca28"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.026705 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" event={"ID":"e6ff8412-c6e3-43a6-92e3-1644d33ee12f","Type":"ContainerStarted","Data":"c89231193a1bff5bbdfbbffe474b2be00c04f6500d488d6bc8f4e546840f52d4"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.029119 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" event={"ID":"aae73cf2-faff-476e-8888-450853cea687","Type":"ContainerStarted","Data":"6d93033668e378a61a0539fa3dbbfcdf96e95eab89d9135e4be856ae117869cb"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.029150 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" event={"ID":"aae73cf2-faff-476e-8888-450853cea687","Type":"ContainerStarted","Data":"68fee4246bf23c7b1452fe1bd2e0d86de3554dc6034c01674e0007d2a4c081d7"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.034407 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" event={"ID":"e1e06b90-d75c-4c56-a08d-3f71290dc764","Type":"ContainerStarted","Data":"c70f5ade450ad615d40a5043409cedcafce47a8bf38edcec970de99495ac37d2"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.036911 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.037023 4813 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-fqp6p container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.037060 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" podUID="e1e06b90-d75c-4c56-a08d-3f71290dc764" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Mar 20 
15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.042119 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqkfl" podStartSLOduration=164.042103063 podStartE2EDuration="2m44.042103063s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.04034375 +0000 UTC m=+229.463046601" watchObservedRunningTime="2026-03-20 15:41:40.042103063 +0000 UTC m=+229.464805904" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.055778 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5qnvb" podStartSLOduration=164.055760608 podStartE2EDuration="2m44.055760608s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.055725467 +0000 UTC m=+229.478428308" watchObservedRunningTime="2026-03-20 15:41:40.055760608 +0000 UTC m=+229.478463449" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.059366 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" event={"ID":"3088bb81-3f95-4383-bbd5-ef89df01a20f","Type":"ContainerStarted","Data":"3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.059401 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" event={"ID":"3088bb81-3f95-4383-bbd5-ef89df01a20f","Type":"ContainerStarted","Data":"49ab4f97d30558b888bfe2192cf038288fefa3b23912c8809195e1cc65e05656"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.060153 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.062347 4813 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-crtz5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.062408 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.081737 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6bsgz" podStartSLOduration=164.081720605 podStartE2EDuration="2m44.081720605s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.081308345 +0000 UTC m=+229.504011186" watchObservedRunningTime="2026-03-20 15:41:40.081720605 +0000 UTC m=+229.504423436" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.085512 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" event={"ID":"e6e86644-c67e-459b-9ad7-9f58409329a8","Type":"ContainerStarted","Data":"d8265336878c2070d14e2e5709129fb5a7aae368be64c155f10ee43eacb021b8"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.090871 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" event={"ID":"fd7a9d1a-a89e-4230-b7cc-60aac33d83da","Type":"ContainerStarted","Data":"650d894a26d40f24bdeea9b75cfe1a0de372057d66cbeaee597191007e3e1779"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.092128 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.093278 4813 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-tlhjn container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.093343 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" podUID="fd7a9d1a-a89e-4230-b7cc-60aac33d83da" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.096594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" event={"ID":"57db9fef-f17d-4d96-b830-8d79612fdfde","Type":"ContainerStarted","Data":"c236e007346e9828049f71f9d3ae7f272dfaa72fbbd1b499cb1f3fc50dac7add"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.096625 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" event={"ID":"57db9fef-f17d-4d96-b830-8d79612fdfde","Type":"ContainerStarted","Data":"120504a4d4624120982795a35bd935fcdb8683b278df3cb5bb34e9bb643df1f0"} Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.120899 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.122734 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.62271192 +0000 UTC m=+230.045414761 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.144611 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d7f5s" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.195454 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-87msr" podStartSLOduration=164.195436692 podStartE2EDuration="2m44.195436692s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.19496474 +0000 UTC m=+229.617667581" watchObservedRunningTime="2026-03-20 15:41:40.195436692 +0000 UTC m=+229.618139533" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.196527 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" podStartSLOduration=164.196522079 podStartE2EDuration="2m44.196522079s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.168903882 +0000 UTC m=+229.591606723" watchObservedRunningTime="2026-03-20 15:41:40.196522079 +0000 UTC m=+229.619224920" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.209972 4813 ???:1] "http: TLS handshake error from 192.168.126.11:34194: no serving certificate available for the kubelet" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.215238 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-m787j" podStartSLOduration=164.215221997 podStartE2EDuration="2m44.215221997s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.214688734 +0000 UTC m=+229.637391575" watchObservedRunningTime="2026-03-20 15:41:40.215221997 +0000 UTC m=+229.637924838" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.224200 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.227002 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.726989815 +0000 UTC m=+230.149692656 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.240450 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lzrhk" podStartSLOduration=164.240433255 podStartE2EDuration="2m44.240433255s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.238945148 +0000 UTC m=+229.661647979" watchObservedRunningTime="2026-03-20 15:41:40.240433255 +0000 UTC m=+229.663136096" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.255099 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4b2zc" podStartSLOduration=164.255074613 podStartE2EDuration="2m44.255074613s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.253886334 +0000 UTC m=+229.676589175" watchObservedRunningTime="2026-03-20 15:41:40.255074613 +0000 UTC m=+229.677777444" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.272592 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:40 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:40 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:40 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.272641 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.295995 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pvn9m" podStartSLOduration=164.295978416 podStartE2EDuration="2m44.295978416s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.277971135 +0000 UTC m=+229.700673976" watchObservedRunningTime="2026-03-20 15:41:40.295978416 +0000 UTC m=+229.718681257" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.297736 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-62nz7" podStartSLOduration=164.297729449 podStartE2EDuration="2m44.297729449s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 15:41:40.29532226 +0000 UTC m=+229.718025101" watchObservedRunningTime="2026-03-20 15:41:40.297729449 +0000 UTC m=+229.720432290" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.322341 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bfhd6" podStartSLOduration=164.322320751 podStartE2EDuration="2m44.322320751s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.317999445 +0000 UTC m=+229.740702286" watchObservedRunningTime="2026-03-20 15:41:40.322320751 +0000 UTC m=+229.745023592" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.325806 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.326077 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.826061753 +0000 UTC m=+230.248764594 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.368613 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" podStartSLOduration=164.368597126 podStartE2EDuration="2m44.368597126s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.368138404 +0000 UTC m=+229.790841245" watchObservedRunningTime="2026-03-20 15:41:40.368597126 +0000 UTC m=+229.791299967" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.369360 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" podStartSLOduration=164.369354154 podStartE2EDuration="2m44.369354154s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.349345064 +0000 UTC m=+229.772047905" watchObservedRunningTime="2026-03-20 15:41:40.369354154 +0000 UTC m=+229.792056995" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.386347 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" podStartSLOduration=164.38633274 podStartE2EDuration="2m44.38633274s" podCreationTimestamp="2026-03-20 
15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:40.386021113 +0000 UTC m=+229.808723954" watchObservedRunningTime="2026-03-20 15:41:40.38633274 +0000 UTC m=+229.809035581" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.426884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.427227 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:40.927215292 +0000 UTC m=+230.349918133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.527647 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.527818 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.027792688 +0000 UTC m=+230.450495529 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.528144 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.528514 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-03-20 15:41:41.028504855 +0000 UTC m=+230.451207696 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.629463 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.629632 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.129599544 +0000 UTC m=+230.552302385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.629798 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.630140 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.130132507 +0000 UTC m=+230.552835348 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.731340 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.731530 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.231504311 +0000 UTC m=+230.654207152 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.731600 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.731925 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.231913721 +0000 UTC m=+230.654616562 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.816473 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dlbqb" Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.832918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.833206 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.333188604 +0000 UTC m=+230.755891445 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:40 crc kubenswrapper[4813]: I0320 15:41:40.933803 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:40 crc kubenswrapper[4813]: E0320 15:41:40.934130 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.434116398 +0000 UTC m=+230.856819239 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.034914 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.035117 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.535092182 +0000 UTC m=+230.957795023 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.035274 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.035651 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.535639365 +0000 UTC m=+230.958342206 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.114135 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" event={"ID":"e6e86644-c67e-459b-9ad7-9f58409329a8","Type":"ContainerStarted","Data":"e0c5a3678a692bf12407c65a6715706d121d2434695cb745475dcbbace270a06"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.114180 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" event={"ID":"e6e86644-c67e-459b-9ad7-9f58409329a8","Type":"ContainerStarted","Data":"44f8db2df9f306ee19af82d1bbd8cfd95c51496e1d5fdd49688c9b00b61df96f"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.124343 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dnc2j" event={"ID":"dcd2f0d3-cdac-480d-91cd-88c8a78005ba","Type":"ContainerStarted","Data":"263ad19b5c2c30c39605f2092e76a6c1184f6cb2d251bea9df7f83b9baaa8ebe"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.124909 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.126921 4813 generic.go:334] "Generic (PLEG): container finished" podID="c8878ff8-6310-45c3-adcb-116e36a2dbde" containerID="d1b0e96f44f5cc0fa2eacab059b56866043aadc93aea98dc70b7a30ef5639e85" exitCode=0 Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.126990 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" event={"ID":"c8878ff8-6310-45c3-adcb-116e36a2dbde","Type":"ContainerDied","Data":"d1b0e96f44f5cc0fa2eacab059b56866043aadc93aea98dc70b7a30ef5639e85"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.129908 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" event={"ID":"1d44b082-02e7-4ef0-9242-cbf1b709e91b","Type":"ContainerStarted","Data":"3bc7d96e390f2c489efcd57f53ba26c52eb2015aca69f090b849b1e78a659256"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.130442 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.131981 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" event={"ID":"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3","Type":"ContainerStarted","Data":"8176d496033a16b22ea80b01fe60349cdfe6c473525bb2653104bb493f159c4c"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.135692 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 
15:41:41.136024 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.636010686 +0000 UTC m=+231.058713527 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.144157 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-62k4m" podStartSLOduration=165.144143855 podStartE2EDuration="2m45.144143855s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:41.141013648 +0000 UTC m=+230.563716489" watchObservedRunningTime="2026-03-20 15:41:41.144143855 +0000 UTC m=+230.566846696" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.177890 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" event={"ID":"1604f02d-e1f2-4ff6-8964-e038f8a64864","Type":"ContainerStarted","Data":"c5d44ea791890835873861b0fdd9cf28ba087f849cd4e3b3b6ca4a85be6ebabb"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.178656 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.185696 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-m787j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.185745 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m787j" podUID="1be7a1f1-df63-450b-aa07-3b5f76e9b6f7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.186084 4813 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-crtz5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.186104 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.192006 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" podStartSLOduration=165.191991878 podStartE2EDuration="2m45.191991878s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:41.191617979 +0000 UTC m=+230.614320820" watchObservedRunningTime="2026-03-20 15:41:41.191991878 +0000 UTC m=+230.614694719" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.185166 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" event={"ID":"df489f65-0ef7-4fd8-a402-9e816dd5620d","Type":"ContainerStarted","Data":"fdfdee4280a60c0307bf86b4338bf59ecac5f52a686036892c2f2617464f1116"} Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.201922 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-tlhjn" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.222658 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-dnc2j" podStartSLOduration=9.222643269 podStartE2EDuration="9.222643269s" podCreationTimestamp="2026-03-20 15:41:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:41.221168483 +0000 UTC m=+230.643871324" watchObservedRunningTime="2026-03-20 15:41:41.222643269 +0000 UTC m=+230.645346110" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.227454 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fqp6p" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.239075 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.265077 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.765062299 +0000 UTC m=+231.187765140 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.266210 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" podStartSLOduration=166.266182956 podStartE2EDuration="2m46.266182956s" podCreationTimestamp="2026-03-20 15:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:41.257294219 +0000 UTC m=+230.679997060" watchObservedRunningTime="2026-03-20 15:41:41.266182956 +0000 UTC m=+230.688885787" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.275843 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:41 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:41 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:41 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.275891 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.302802 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-d4pzs" podStartSLOduration=165.302781304 podStartE2EDuration="2m45.302781304s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:41.298821967 +0000 UTC m=+230.721524808" watchObservedRunningTime="2026-03-20 15:41:41.302781304 +0000 UTC m=+230.725484145" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.345807 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.346167 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.846149577 +0000 UTC m=+231.268852418 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.447388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.447765 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:41.947751247 +0000 UTC m=+231.370454098 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.548106 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.548394 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.048379154 +0000 UTC m=+231.471081995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.572635 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-xv5bz" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.649128 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.649534 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.149522213 +0000 UTC m=+231.572225054 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.750436 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.750856 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.250823646 +0000 UTC m=+231.673526487 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.750860 4813 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.751115 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.751454 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.251441331 +0000 UTC m=+231.674144232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.852344 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.852696 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.352681973 +0000 UTC m=+231.775384814 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.953528 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:41 crc kubenswrapper[4813]: E0320 15:41:41.953894 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.453882374 +0000 UTC m=+231.876585215 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.956787 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-46qtn"] Mar 20 15:41:41 crc kubenswrapper[4813]: I0320 15:41:41.956967 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" podUID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" containerName="controller-manager" containerID="cri-o://04c8d1b1b0554baeed53a1603b54e133f53aad86281d466046736258c307a70b" gracePeriod=30 Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.014731 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.054063 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.054175 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.554154212 +0000 UTC m=+231.976857053 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.054304 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.054596 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.554588513 +0000 UTC m=+231.977291354 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.155369 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.155637 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.655624669 +0000 UTC m=+232.078327510 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.155931 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.156288 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.656273595 +0000 UTC m=+232.078976436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.204956 4813 generic.go:334] "Generic (PLEG): container finished" podID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" containerID="04c8d1b1b0554baeed53a1603b54e133f53aad86281d466046736258c307a70b" exitCode=0 Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.205039 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" event={"ID":"a55eeaca-e582-46e3-8f26-3a72cdcced4f","Type":"ContainerDied","Data":"04c8d1b1b0554baeed53a1603b54e133f53aad86281d466046736258c307a70b"} Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.212548 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" event={"ID":"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3","Type":"ContainerStarted","Data":"6fc7c77869e2d164d2d9286d4b5c6949daa8c7392c1951c2af4a1360fc858e02"} Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.212604 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" event={"ID":"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3","Type":"ContainerStarted","Data":"aead54c7e6d191112c3ece7a8b504a6ab1c8cac2807eb27d1f74a01f821374a2"} Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.212618 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" event={"ID":"e41db8d8-3508-40dc-bca8-ecd9ce41a6e3","Type":"ContainerStarted","Data":"a84d514b5a11773be2c30e0a4fb21e047f303978ae74f5340f5f38f23b2409d7"} Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.213630 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-m787j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 
10.217.0.17:8080: connect: connection refused" start-of-body= Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.213674 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m787j" podUID="1be7a1f1-df63-450b-aa07-3b5f76e9b6f7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.216642 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" podUID="e190ce03-ff83-432a-a092-b7ed2d017aaf" containerName="route-controller-manager" containerID="cri-o://8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6" gracePeriod=30 Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.225041 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.253228 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-m4mtq" podStartSLOduration=10.253206171 podStartE2EDuration="10.253206171s" podCreationTimestamp="2026-03-20 15:41:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:42.252455233 +0000 UTC m=+231.675158074" watchObservedRunningTime="2026-03-20 15:41:42.253206171 +0000 UTC m=+231.675909012" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.260170 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.261025 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.761005582 +0000 UTC m=+232.183708423 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.273348 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:42 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:42 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:42 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.273421 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.278806 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b777z"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.280995 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.284119 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.291357 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b777z"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.366051 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-catalog-content\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.366131 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.366163 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z49s7\" (UniqueName: \"kubernetes.io/projected/6f6910f6-780a-428c-a21b-f6702a912af1-kube-api-access-z49s7\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.366259 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-utilities\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.366456 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.866439467 +0000 UTC m=+232.289142308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.450279 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.466844 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wsh8b"] Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.467030 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" containerName="controller-manager" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467041 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" containerName="controller-manager" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467137 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" containerName="controller-manager" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467138 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467385 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-utilities\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467431 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-catalog-content\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467462 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z49s7\" (UniqueName: \"kubernetes.io/projected/6f6910f6-780a-428c-a21b-f6702a912af1-kube-api-access-z49s7\") pod \"certified-operators-b777z\" (UID: 
\"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.467784 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 15:41:42.967766681 +0000 UTC m=+232.390469522 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.467869 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-catalog-content\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.468143 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-utilities\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.468171 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.474607 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.506970 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wsh8b"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.508304 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z49s7\" (UniqueName: \"kubernetes.io/projected/6f6910f6-780a-428c-a21b-f6702a912af1-kube-api-access-z49s7\") pod \"certified-operators-b777z\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.549706 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.567858 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxkxp\" (UniqueName: \"kubernetes.io/projected/a55eeaca-e582-46e3-8f26-3a72cdcced4f-kube-api-access-kxkxp\") pod \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568119 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-client-ca\") pod \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568157 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55eeaca-e582-46e3-8f26-3a72cdcced4f-serving-cert\") pod \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568187 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-proxy-ca-bundles\") pod \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-config\") pod \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\" (UID: \"a55eeaca-e582-46e3-8f26-3a72cdcced4f\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568758 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vk4d\" (UniqueName: \"kubernetes.io/projected/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-kube-api-access-7vk4d\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568811 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-catalog-content\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-utilities\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.568913 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" 
Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.569163 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-client-ca" (OuterVolumeSpecName: "client-ca") pod "a55eeaca-e582-46e3-8f26-3a72cdcced4f" (UID: "a55eeaca-e582-46e3-8f26-3a72cdcced4f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.569532 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 15:41:43.069514935 +0000 UTC m=+232.492217776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b4swq" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.574541 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a55eeaca-e582-46e3-8f26-3a72cdcced4f" (UID: "a55eeaca-e582-46e3-8f26-3a72cdcced4f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.574555 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-config" (OuterVolumeSpecName: "config") pod "a55eeaca-e582-46e3-8f26-3a72cdcced4f" (UID: "a55eeaca-e582-46e3-8f26-3a72cdcced4f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.575031 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a55eeaca-e582-46e3-8f26-3a72cdcced4f-kube-api-access-kxkxp" (OuterVolumeSpecName: "kube-api-access-kxkxp") pod "a55eeaca-e582-46e3-8f26-3a72cdcced4f" (UID: "a55eeaca-e582-46e3-8f26-3a72cdcced4f"). InnerVolumeSpecName "kube-api-access-kxkxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.576118 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a55eeaca-e582-46e3-8f26-3a72cdcced4f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a55eeaca-e582-46e3-8f26-3a72cdcced4f" (UID: "a55eeaca-e582-46e3-8f26-3a72cdcced4f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.612589 4813 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-03-20T15:41:41.75098063Z","Handler":null,"Name":""} Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.618594 4813 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.618629 4813 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.646372 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.665503 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4v5fd"] Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.665682 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8878ff8-6310-45c3-adcb-116e36a2dbde" containerName="collect-profiles" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.665693 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8878ff8-6310-45c3-adcb-116e36a2dbde" containerName="collect-profiles" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.666074 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8878ff8-6310-45c3-adcb-116e36a2dbde" containerName="collect-profiles" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.666848 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.669410 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c8878ff8-6310-45c3-adcb-116e36a2dbde-config-volume\") pod \"c8878ff8-6310-45c3-adcb-116e36a2dbde\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.669505 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg6lq\" (UniqueName: \"kubernetes.io/projected/c8878ff8-6310-45c3-adcb-116e36a2dbde-kube-api-access-kg6lq\") pod \"c8878ff8-6310-45c3-adcb-116e36a2dbde\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.669636 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.669674 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c8878ff8-6310-45c3-adcb-116e36a2dbde-secret-volume\") pod \"c8878ff8-6310-45c3-adcb-116e36a2dbde\" (UID: \"c8878ff8-6310-45c3-adcb-116e36a2dbde\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670023 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vk4d\" (UniqueName: \"kubernetes.io/projected/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-kube-api-access-7vk4d\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670029 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8878ff8-6310-45c3-adcb-116e36a2dbde-config-volume" (OuterVolumeSpecName: "config-volume") pod "c8878ff8-6310-45c3-adcb-116e36a2dbde" (UID: "c8878ff8-6310-45c3-adcb-116e36a2dbde"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670065 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-catalog-content\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670106 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-utilities\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670197 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c8878ff8-6310-45c3-adcb-116e36a2dbde-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670210 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670220 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a55eeaca-e582-46e3-8f26-3a72cdcced4f-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670229 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670240 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a55eeaca-e582-46e3-8f26-3a72cdcced4f-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.670249 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxkxp\" (UniqueName: \"kubernetes.io/projected/a55eeaca-e582-46e3-8f26-3a72cdcced4f-kube-api-access-kxkxp\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.671429 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-utilities\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.672936 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-catalog-content\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.674723 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8878ff8-6310-45c3-adcb-116e36a2dbde-kube-api-access-kg6lq" (OuterVolumeSpecName: "kube-api-access-kg6lq") pod "c8878ff8-6310-45c3-adcb-116e36a2dbde" (UID: 
"c8878ff8-6310-45c3-adcb-116e36a2dbde"). InnerVolumeSpecName "kube-api-access-kg6lq". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.675013 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.675414 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4v5fd"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.676243 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.676577 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8878ff8-6310-45c3-adcb-116e36a2dbde-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c8878ff8-6310-45c3-adcb-116e36a2dbde" (UID: "c8878ff8-6310-45c3-adcb-116e36a2dbde"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.692321 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vk4d\" (UniqueName: \"kubernetes.io/projected/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-kube-api-access-7vk4d\") pod \"community-operators-wsh8b\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771339 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w4w6\" (UniqueName: \"kubernetes.io/projected/e190ce03-ff83-432a-a092-b7ed2d017aaf-kube-api-access-4w4w6\") pod \"e190ce03-ff83-432a-a092-b7ed2d017aaf\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771410 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-client-ca\") pod \"e190ce03-ff83-432a-a092-b7ed2d017aaf\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771448 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e190ce03-ff83-432a-a092-b7ed2d017aaf-serving-cert\") pod \"e190ce03-ff83-432a-a092-b7ed2d017aaf\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-config\") pod \"e190ce03-ff83-432a-a092-b7ed2d017aaf\" (UID: \"e190ce03-ff83-432a-a092-b7ed2d017aaf\") " Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771814 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771909 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-utilities\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771955 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qjfj\" (UniqueName: \"kubernetes.io/projected/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-kube-api-access-2qjfj\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.771997 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-catalog-content\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.772035 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg6lq\" (UniqueName: \"kubernetes.io/projected/c8878ff8-6310-45c3-adcb-116e36a2dbde-kube-api-access-kg6lq\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.772053 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c8878ff8-6310-45c3-adcb-116e36a2dbde-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.772609 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-client-ca" (OuterVolumeSpecName: "client-ca") pod "e190ce03-ff83-432a-a092-b7ed2d017aaf" (UID: "e190ce03-ff83-432a-a092-b7ed2d017aaf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.773019 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-config" (OuterVolumeSpecName: "config") pod "e190ce03-ff83-432a-a092-b7ed2d017aaf" (UID: "e190ce03-ff83-432a-a092-b7ed2d017aaf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.778382 4813 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.778691 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.784071 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e190ce03-ff83-432a-a092-b7ed2d017aaf-kube-api-access-4w4w6" (OuterVolumeSpecName: "kube-api-access-4w4w6") pod "e190ce03-ff83-432a-a092-b7ed2d017aaf" (UID: "e190ce03-ff83-432a-a092-b7ed2d017aaf"). InnerVolumeSpecName "kube-api-access-4w4w6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.784098 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e190ce03-ff83-432a-a092-b7ed2d017aaf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e190ce03-ff83-432a-a092-b7ed2d017aaf" (UID: "e190ce03-ff83-432a-a092-b7ed2d017aaf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.802758 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.810810 4813 ???:1] "http: TLS handshake error from 192.168.126.11:48798: no serving certificate available for the kubelet" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.823246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b4swq\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876186 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-utilities\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876246 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qjfj\" (UniqueName: \"kubernetes.io/projected/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-kube-api-access-2qjfj\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-catalog-content\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc 
kubenswrapper[4813]: I0320 15:41:42.876409 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876425 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w4w6\" (UniqueName: \"kubernetes.io/projected/e190ce03-ff83-432a-a092-b7ed2d017aaf-kube-api-access-4w4w6\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876437 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e190ce03-ff83-432a-a092-b7ed2d017aaf-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876447 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e190ce03-ff83-432a-a092-b7ed2d017aaf-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876753 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-utilities\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.876837 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-catalog-content\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.882205 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qcm7m"] Mar 20 15:41:42 crc kubenswrapper[4813]: E0320 15:41:42.882386 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e190ce03-ff83-432a-a092-b7ed2d017aaf" containerName="route-controller-manager" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.882396 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e190ce03-ff83-432a-a092-b7ed2d017aaf" containerName="route-controller-manager" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.882500 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e190ce03-ff83-432a-a092-b7ed2d017aaf" containerName="route-controller-manager" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.883086 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.894729 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qcm7m"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.897123 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qjfj\" (UniqueName: \"kubernetes.io/projected/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-kube-api-access-2qjfj\") pod \"certified-operators-4v5fd\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.919549 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b777z"] Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.978040 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-catalog-content\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.978139 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-utilities\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.978173 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjnhg\" (UniqueName: \"kubernetes.io/projected/deacbab9-8fbd-4707-8098-5d748d77caae-kube-api-access-bjnhg\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:42 crc kubenswrapper[4813]: I0320 15:41:42.995857 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.045334 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.064380 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wsh8b"] Mar 20 15:41:43 crc kubenswrapper[4813]: W0320 15:41:43.071945 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6121b8b4_c0d0_4b17_8f4e_8c4a3392ad3f.slice/crio-dc4dff779584a325ff3cb25eb59c9a1b997cad6b681800bfc02b4d38ea7c6cc5 WatchSource:0}: Error finding container dc4dff779584a325ff3cb25eb59c9a1b997cad6b681800bfc02b4d38ea7c6cc5: Status 404 returned error can't find the container with id dc4dff779584a325ff3cb25eb59c9a1b997cad6b681800bfc02b4d38ea7c6cc5 Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.078953 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-catalog-content\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.079098 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-utilities\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.079207 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjnhg\" (UniqueName: \"kubernetes.io/projected/deacbab9-8fbd-4707-8098-5d748d77caae-kube-api-access-bjnhg\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.079566 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-catalog-content\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.079807 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-utilities\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.100675 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjnhg\" (UniqueName: \"kubernetes.io/projected/deacbab9-8fbd-4707-8098-5d748d77caae-kube-api-access-bjnhg\") pod \"community-operators-qcm7m\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.220781 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.225498 4813 generic.go:334] "Generic (PLEG): container finished" podID="6f6910f6-780a-428c-a21b-f6702a912af1" containerID="5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c" exitCode=0 Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.225571 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b777z" event={"ID":"6f6910f6-780a-428c-a21b-f6702a912af1","Type":"ContainerDied","Data":"5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.225598 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b777z" event={"ID":"6f6910f6-780a-428c-a21b-f6702a912af1","Type":"ContainerStarted","Data":"354b2c37d8f887b53dd812c768944bf6901dae294178cb9a243874681b1a01ad"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.228377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" event={"ID":"c8878ff8-6310-45c3-adcb-116e36a2dbde","Type":"ContainerDied","Data":"5f91945f69f54ae738de1cb86d53f5d4ae07b916657cfb061d8282b65f62ce91"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.228387 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.228399 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f91945f69f54ae738de1cb86d53f5d4ae07b916657cfb061d8282b65f62ce91" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.241436 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4v5fd"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.245129 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" event={"ID":"a55eeaca-e582-46e3-8f26-3a72cdcced4f","Type":"ContainerDied","Data":"0ca51d7ff11ef4730889fe22cfe28059bb083b6fed29361e1669830ab0fcbf7f"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.245177 4813 scope.go:117] "RemoveContainer" containerID="04c8d1b1b0554baeed53a1603b54e133f53aad86281d466046736258c307a70b" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.245220 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-46qtn" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.249564 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerStarted","Data":"dc4dff779584a325ff3cb25eb59c9a1b997cad6b681800bfc02b4d38ea7c6cc5"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.251981 4813 generic.go:334] "Generic (PLEG): container finished" podID="e190ce03-ff83-432a-a092-b7ed2d017aaf" containerID="8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6" exitCode=0 Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.253326 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.253409 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" event={"ID":"e190ce03-ff83-432a-a092-b7ed2d017aaf","Type":"ContainerDied","Data":"8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.253643 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4" event={"ID":"e190ce03-ff83-432a-a092-b7ed2d017aaf","Type":"ContainerDied","Data":"2095fd3cb97b33265e767196f881d137a84387741fcb30cf16902e0d8d0611b5"} Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.259714 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xzds4" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.265887 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:43 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:43 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:43 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.265934 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.276305 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.281631 4813 scope.go:117] "RemoveContainer" containerID="8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.299754 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b4swq"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.314546 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.319352 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zcdj4"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.324828 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-46qtn"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.326405 4813 scope.go:117] "RemoveContainer" containerID="8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6" Mar 20 15:41:43 crc kubenswrapper[4813]: E0320 15:41:43.327069 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6\": container with ID starting with 
8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6 not found: ID does not exist" containerID="8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.327131 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6"} err="failed to get container status \"8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6\": rpc error: code = NotFound desc = could not find container \"8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6\": container with ID starting with 8481e33246ae46c5bdba1c04e3734fdafe6078f32af774d0a48c93b3aed025c6 not found: ID does not exist" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.328336 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-46qtn"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.378208 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.378919 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.383359 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.384079 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.385943 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.386225 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.387284 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.387399 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.387571 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.387404 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.388469 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.388583 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.388725 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 
15:41:43.388843 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.388872 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.388994 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.389144 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:41:43 crc kubenswrapper[4813]: W0320 15:41:43.390736 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17ddafd0_c19d_4c6a_a75f_70b85668c360.slice/crio-5eada1b239333935c858557a45eb87955f1fa2845dc50151142030c3ff1a9cfe WatchSource:0}: Error finding container 5eada1b239333935c858557a45eb87955f1fa2845dc50151142030c3ff1a9cfe: Status 404 returned error can't find the container with id 5eada1b239333935c858557a45eb87955f1fa2845dc50151142030c3ff1a9cfe Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.393713 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.399105 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd"] Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489471 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-client-ca\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489566 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59cvm\" (UniqueName: \"kubernetes.io/projected/03e80c16-8460-4458-9bf8-af5f1b85e487-kube-api-access-59cvm\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489601 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03e80c16-8460-4458-9bf8-af5f1b85e487-serving-cert\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489618 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-config\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489695 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-proxy-ca-bundles\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489735 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f7pm\" (UniqueName: \"kubernetes.io/projected/252b6d65-ea4a-4077-9e41-8a0acc068c13-kube-api-access-7f7pm\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489760 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-config\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489809 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/252b6d65-ea4a-4077-9e41-8a0acc068c13-serving-cert\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.489845 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-client-ca\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.492713 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qcm7m"] Mar 20 15:41:43 crc kubenswrapper[4813]: W0320 15:41:43.501888 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddeacbab9_8fbd_4707_8098_5d748d77caae.slice/crio-b08c564b2e3c550b6ffa72ed30e06f579f11b473e1cfb4f710463fe107501e34 WatchSource:0}: Error finding container b08c564b2e3c550b6ffa72ed30e06f579f11b473e1cfb4f710463fe107501e34: Status 404 returned error can't find the container with id b08c564b2e3c550b6ffa72ed30e06f579f11b473e1cfb4f710463fe107501e34 Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591159 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03e80c16-8460-4458-9bf8-af5f1b85e487-serving-cert\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591208 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-config\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " 
pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591253 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-proxy-ca-bundles\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591286 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f7pm\" (UniqueName: \"kubernetes.io/projected/252b6d65-ea4a-4077-9e41-8a0acc068c13-kube-api-access-7f7pm\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591318 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-config\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591338 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/252b6d65-ea4a-4077-9e41-8a0acc068c13-serving-cert\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591361 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-client-ca\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591383 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-client-ca\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.591456 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59cvm\" (UniqueName: \"kubernetes.io/projected/03e80c16-8460-4458-9bf8-af5f1b85e487-kube-api-access-59cvm\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.593060 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-config\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.594057 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-client-ca\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.594397 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-proxy-ca-bundles\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.597046 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/252b6d65-ea4a-4077-9e41-8a0acc068c13-serving-cert\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.597413 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03e80c16-8460-4458-9bf8-af5f1b85e487-serving-cert\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.597975 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-client-ca\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.599192 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-config\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.611346 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f7pm\" (UniqueName: \"kubernetes.io/projected/252b6d65-ea4a-4077-9e41-8a0acc068c13-kube-api-access-7f7pm\") pod \"controller-manager-7bbdb64b65-vvrmd\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.614576 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59cvm\" (UniqueName: \"kubernetes.io/projected/03e80c16-8460-4458-9bf8-af5f1b85e487-kube-api-access-59cvm\") pod \"route-controller-manager-6bcf54b765-dqlgt\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.714278 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.725037 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:43 crc kubenswrapper[4813]: I0320 15:41:43.959328 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd"] Mar 20 15:41:43 crc kubenswrapper[4813]: W0320 15:41:43.989589 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod252b6d65_ea4a_4077_9e41_8a0acc068c13.slice/crio-438107a116cf45f72c75bccedec6b49c695ae12ae17223b894face50c6f310c3 WatchSource:0}: Error finding container 438107a116cf45f72c75bccedec6b49c695ae12ae17223b894face50c6f310c3: Status 404 returned error can't find the container with id 438107a116cf45f72c75bccedec6b49c695ae12ae17223b894face50c6f310c3 Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.029475 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt"] Mar 20 15:41:44 crc kubenswrapper[4813]: W0320 15:41:44.039703 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03e80c16_8460_4458_9bf8_af5f1b85e487.slice/crio-926faf1b07f0883fa2002671495c2964b5431418585dba81db60a3089b643dd1 WatchSource:0}: Error finding container 926faf1b07f0883fa2002671495c2964b5431418585dba81db60a3089b643dd1: Status 404 returned error can't find the container with id 926faf1b07f0883fa2002671495c2964b5431418585dba81db60a3089b643dd1 Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.263621 4813 generic.go:334] "Generic (PLEG): container finished" podID="deacbab9-8fbd-4707-8098-5d748d77caae" containerID="c9f24b5a705901f9ae9fe4c5dcdb5b9fdf522853c7d18831f0fcb8f3dd42407f" exitCode=0 Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.263698 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcm7m" event={"ID":"deacbab9-8fbd-4707-8098-5d748d77caae","Type":"ContainerDied","Data":"c9f24b5a705901f9ae9fe4c5dcdb5b9fdf522853c7d18831f0fcb8f3dd42407f"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.263729 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcm7m" event={"ID":"deacbab9-8fbd-4707-8098-5d748d77caae","Type":"ContainerStarted","Data":"b08c564b2e3c550b6ffa72ed30e06f579f11b473e1cfb4f710463fe107501e34"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.265471 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:44 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:44 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:44 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.265539 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:44 crc 
kubenswrapper[4813]: I0320 15:41:44.278605 4813 generic.go:334] "Generic (PLEG): container finished" podID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerID="91712df359be7aaf64ca7613851142844c18ee10ce667d7d3065e01eb7ec901b" exitCode=0 Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.279864 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4v5fd" event={"ID":"ddf7608c-e3c8-40c2-a92e-98605cd5f35a","Type":"ContainerDied","Data":"91712df359be7aaf64ca7613851142844c18ee10ce667d7d3065e01eb7ec901b"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.279897 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4v5fd" event={"ID":"ddf7608c-e3c8-40c2-a92e-98605cd5f35a","Type":"ContainerStarted","Data":"a7dbe758427684484463248361d018c114c18602a06c9b81882555919afb6adc"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.284691 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" event={"ID":"03e80c16-8460-4458-9bf8-af5f1b85e487","Type":"ContainerStarted","Data":"0b3cf944c60f1baad3a2b07e6a65648dbedc5c72063c2c633ea9d5cb339a3694"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.284724 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" event={"ID":"03e80c16-8460-4458-9bf8-af5f1b85e487","Type":"ContainerStarted","Data":"926faf1b07f0883fa2002671495c2964b5431418585dba81db60a3089b643dd1"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.285669 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.300899 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" event={"ID":"17ddafd0-c19d-4c6a-a75f-70b85668c360","Type":"ContainerStarted","Data":"988b19ec97ba8ff2a40773fce8a0096a435eb5bc76a450907f9b98c48e012f1d"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.300948 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" event={"ID":"17ddafd0-c19d-4c6a-a75f-70b85668c360","Type":"ContainerStarted","Data":"5eada1b239333935c858557a45eb87955f1fa2845dc50151142030c3ff1a9cfe"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.301093 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.325787 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" podStartSLOduration=168.325767065 podStartE2EDuration="2m48.325767065s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:44.322164597 +0000 UTC m=+233.744867458" watchObservedRunningTime="2026-03-20 15:41:44.325767065 +0000 UTC m=+233.748469906" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.326315 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" 
event={"ID":"252b6d65-ea4a-4077-9e41-8a0acc068c13","Type":"ContainerStarted","Data":"1beff2ebaa265088c62245eb85118aa523a16d0ea80a6c8939693b22efe387ed"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.326355 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" event={"ID":"252b6d65-ea4a-4077-9e41-8a0acc068c13","Type":"ContainerStarted","Data":"438107a116cf45f72c75bccedec6b49c695ae12ae17223b894face50c6f310c3"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.327267 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.336708 4813 generic.go:334] "Generic (PLEG): container finished" podID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerID="a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760" exitCode=0 Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.336981 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerDied","Data":"a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760"} Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.339713 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.352289 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" podStartSLOduration=2.352256444 podStartE2EDuration="2.352256444s" podCreationTimestamp="2026-03-20 15:41:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:44.349381694 +0000 UTC m=+233.772084555" watchObservedRunningTime="2026-03-20 15:41:44.352256444 +0000 UTC m=+233.774959305" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.383831 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" podStartSLOduration=2.383812298 podStartE2EDuration="2.383812298s" podCreationTimestamp="2026-03-20 15:41:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:44.380530448 +0000 UTC m=+233.803233309" watchObservedRunningTime="2026-03-20 15:41:44.383812298 +0000 UTC m=+233.806515139" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.466401 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-grn6l"] Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.467597 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.473517 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.488847 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-grn6l"] Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.511114 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-catalog-content\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.511164 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qv4n\" (UniqueName: \"kubernetes.io/projected/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-kube-api-access-6qv4n\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.511192 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-utilities\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.553969 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.612279 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.612388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-catalog-content\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.612459 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qv4n\" (UniqueName: \"kubernetes.io/projected/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-kube-api-access-6qv4n\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.612507 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-utilities\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.613136 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-utilities\") pod \"redhat-marketplace-grn6l\" 
(UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.613425 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-catalog-content\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.616673 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-h2fdd" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.635322 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qv4n\" (UniqueName: \"kubernetes.io/projected/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-kube-api-access-6qv4n\") pod \"redhat-marketplace-grn6l\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.793723 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.872835 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pcdr6"] Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.873784 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.891842 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcdr6"] Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.916863 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-utilities\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.916942 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-catalog-content\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:44 crc kubenswrapper[4813]: I0320 15:41:44.916971 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zcht\" (UniqueName: \"kubernetes.io/projected/f499bfc2-7181-42d0-8e36-16bba273f3f5-kube-api-access-2zcht\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.023405 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-catalog-content\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.023809 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2zcht\" (UniqueName: \"kubernetes.io/projected/f499bfc2-7181-42d0-8e36-16bba273f3f5-kube-api-access-2zcht\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.023932 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-utilities\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.024627 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-utilities\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.024946 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-catalog-content\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.046860 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zcht\" (UniqueName: \"kubernetes.io/projected/f499bfc2-7181-42d0-8e36-16bba273f3f5-kube-api-access-2zcht\") pod \"redhat-marketplace-pcdr6\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.189220 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.189528 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.190150 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.191823 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.192411 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.194327 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.226598 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d8518e7-60ec-45a4-96da-b1cea470296a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.226648 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2d8518e7-60ec-45a4-96da-b1cea470296a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.261182 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.262569 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-grn6l"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.264061 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:45 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:45 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:45 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.264097 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.282851 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a55eeaca-e582-46e3-8f26-3a72cdcced4f" path="/var/lib/kubelet/pods/a55eeaca-e582-46e3-8f26-3a72cdcced4f/volumes" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.283497 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e190ce03-ff83-432a-a092-b7ed2d017aaf" path="/var/lib/kubelet/pods/e190ce03-ff83-432a-a092-b7ed2d017aaf/volumes" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.330300 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d8518e7-60ec-45a4-96da-b1cea470296a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.330377 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2d8518e7-60ec-45a4-96da-b1cea470296a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.331493 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2d8518e7-60ec-45a4-96da-b1cea470296a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.390632 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d8518e7-60ec-45a4-96da-b1cea470296a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.411785 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grn6l" event={"ID":"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705","Type":"ContainerStarted","Data":"3eb278335fa28c5b292a5f20e44852a7a12c0901b223128a6db35ee26dafdc74"} Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.447800 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcdr6"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.452565 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.452602 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.460418 4813 patch_prober.go:28] interesting pod/console-f9d7485db-gsjbb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.460517 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-gsjbb" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.464003 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wv9qh"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.466635 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: W0320 15:41:45.471412 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf499bfc2_7181_42d0_8e36_16bba273f3f5.slice/crio-873a2c5597ebdf9708d12b96fa55a3fde5f66edc627c36522dbe076f6cb09cab WatchSource:0}: Error finding container 873a2c5597ebdf9708d12b96fa55a3fde5f66edc627c36522dbe076f6cb09cab: Status 404 returned error can't find the container with id 873a2c5597ebdf9708d12b96fa55a3fde5f66edc627c36522dbe076f6cb09cab Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.473071 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.475878 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wv9qh"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.509594 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.535007 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtt8x\" (UniqueName: \"kubernetes.io/projected/7c3f7704-b939-4e16-b4fb-1addffc3091d-kube-api-access-vtt8x\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.535241 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-utilities\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.535504 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-catalog-content\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.638396 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-utilities\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.638514 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-catalog-content\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.638552 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtt8x\" (UniqueName: \"kubernetes.io/projected/7c3f7704-b939-4e16-b4fb-1addffc3091d-kube-api-access-vtt8x\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " 
pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.638892 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-utilities\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.639121 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-catalog-content\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.654763 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtt8x\" (UniqueName: \"kubernetes.io/projected/7c3f7704-b939-4e16-b4fb-1addffc3091d-kube-api-access-vtt8x\") pod \"redhat-operators-wv9qh\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.705830 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.795937 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.859917 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rxzdv"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.862636 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.873184 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rxzdv"] Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.901956 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-m787j container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.902011 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-m787j" podUID="1be7a1f1-df63-450b-aa07-3b5f76e9b6f7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.904141 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-m787j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.904194 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m787j" podUID="1be7a1f1-df63-450b-aa07-3b5f76e9b6f7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.947185 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-utilities\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.947273 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdmrv\" (UniqueName: \"kubernetes.io/projected/dafe7a17-1180-4e97-a6bc-309258d024b6-kube-api-access-cdmrv\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:45 crc kubenswrapper[4813]: I0320 15:41:45.947293 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-catalog-content\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.048293 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-utilities\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.048648 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdmrv\" (UniqueName: \"kubernetes.io/projected/dafe7a17-1180-4e97-a6bc-309258d024b6-kube-api-access-cdmrv\") pod \"redhat-operators-rxzdv\" 
(UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.048671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-catalog-content\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.048911 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-utilities\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.049133 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-catalog-content\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.073552 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdmrv\" (UniqueName: \"kubernetes.io/projected/dafe7a17-1180-4e97-a6bc-309258d024b6-kube-api-access-cdmrv\") pod \"redhat-operators-rxzdv\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.196762 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.264066 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nr4cq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 15:41:46 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Mar 20 15:41:46 crc kubenswrapper[4813]: [+]process-running ok Mar 20 15:41:46 crc kubenswrapper[4813]: healthz check failed Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.264139 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nr4cq" podUID="3ab628f6-a5aa-4cf7-af1e-774587397924" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.320420 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wv9qh"] Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.404967 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.414271 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.420446 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.420815 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.420957 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.438997 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2d8518e7-60ec-45a4-96da-b1cea470296a","Type":"ContainerStarted","Data":"13eb8e87130bb75fef896011c5a6c3ac90aabfb14f2f988b3f977c3074ef7adb"} Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.439047 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2d8518e7-60ec-45a4-96da-b1cea470296a","Type":"ContainerStarted","Data":"8d741bb9c533c76590e039da75b59acffff0cdb98d34d16d2c93b4da13fb831b"} Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.444875 4813 generic.go:334] "Generic (PLEG): container finished" podID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerID="5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1" exitCode=0 Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.444936 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcdr6" event={"ID":"f499bfc2-7181-42d0-8e36-16bba273f3f5","Type":"ContainerDied","Data":"5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1"} Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.444956 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcdr6" event={"ID":"f499bfc2-7181-42d0-8e36-16bba273f3f5","Type":"ContainerStarted","Data":"873a2c5597ebdf9708d12b96fa55a3fde5f66edc627c36522dbe076f6cb09cab"} Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.449970 4813 generic.go:334] "Generic (PLEG): container finished" podID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerID="a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2" exitCode=0 Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.450015 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grn6l" event={"ID":"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705","Type":"ContainerDied","Data":"a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2"} Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.456334 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.456317278 podStartE2EDuration="1.456317278s" podCreationTimestamp="2026-03-20 15:41:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:41:46.45518739 +0000 UTC m=+235.877890241" watchObservedRunningTime="2026-03-20 15:41:46.456317278 +0000 UTC m=+235.879020119" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.457410 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/d2241316-a31e-4144-80db-7e75c432c8d1-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.457567 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2241316-a31e-4144-80db-7e75c432c8d1-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.558398 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2241316-a31e-4144-80db-7e75c432c8d1-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.558860 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2241316-a31e-4144-80db-7e75c432c8d1-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.559145 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2241316-a31e-4144-80db-7e75c432c8d1-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.576868 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2241316-a31e-4144-80db-7e75c432c8d1-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.737139 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:41:46 crc kubenswrapper[4813]: I0320 15:41:46.761598 4813 ???:1] "http: TLS handshake error from 192.168.126.11:48810: no serving certificate available for the kubelet" Mar 20 15:41:47 crc kubenswrapper[4813]: I0320 15:41:47.263392 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:47 crc kubenswrapper[4813]: I0320 15:41:47.276565 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nr4cq" Mar 20 15:41:47 crc kubenswrapper[4813]: I0320 15:41:47.469277 4813 generic.go:334] "Generic (PLEG): container finished" podID="2d8518e7-60ec-45a4-96da-b1cea470296a" containerID="13eb8e87130bb75fef896011c5a6c3ac90aabfb14f2f988b3f977c3074ef7adb" exitCode=0 Mar 20 15:41:47 crc kubenswrapper[4813]: I0320 15:41:47.472784 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2d8518e7-60ec-45a4-96da-b1cea470296a","Type":"ContainerDied","Data":"13eb8e87130bb75fef896011c5a6c3ac90aabfb14f2f988b3f977c3074ef7adb"} Mar 20 15:41:47 crc kubenswrapper[4813]: I0320 15:41:47.955305 4813 ???:1] "http: TLS handshake error from 192.168.126.11:48818: no serving certificate available for the kubelet" Mar 20 15:41:51 crc kubenswrapper[4813]: I0320 15:41:51.123026 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-dnc2j" Mar 20 15:41:53 crc kubenswrapper[4813]: W0320 15:41:53.209376 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c3f7704_b939_4e16_b4fb_1addffc3091d.slice/crio-4177cc3b1bb1d493ec4e172b0295e6941d7f5bdc60870fd4ae3c181ac3f118c9 WatchSource:0}: Error finding container 4177cc3b1bb1d493ec4e172b0295e6941d7f5bdc60870fd4ae3c181ac3f118c9: Status 404 returned error can't find the container with id 4177cc3b1bb1d493ec4e172b0295e6941d7f5bdc60870fd4ae3c181ac3f118c9 Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.242114 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.371554 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d8518e7-60ec-45a4-96da-b1cea470296a-kube-api-access\") pod \"2d8518e7-60ec-45a4-96da-b1cea470296a\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.372062 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2d8518e7-60ec-45a4-96da-b1cea470296a-kubelet-dir\") pod \"2d8518e7-60ec-45a4-96da-b1cea470296a\" (UID: \"2d8518e7-60ec-45a4-96da-b1cea470296a\") " Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.372175 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2d8518e7-60ec-45a4-96da-b1cea470296a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2d8518e7-60ec-45a4-96da-b1cea470296a" (UID: "2d8518e7-60ec-45a4-96da-b1cea470296a"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.372882 4813 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2d8518e7-60ec-45a4-96da-b1cea470296a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.376921 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d8518e7-60ec-45a4-96da-b1cea470296a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2d8518e7-60ec-45a4-96da-b1cea470296a" (UID: "2d8518e7-60ec-45a4-96da-b1cea470296a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.474419 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2d8518e7-60ec-45a4-96da-b1cea470296a-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.507072 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2d8518e7-60ec-45a4-96da-b1cea470296a","Type":"ContainerDied","Data":"8d741bb9c533c76590e039da75b59acffff0cdb98d34d16d2c93b4da13fb831b"} Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.507114 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.507118 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d741bb9c533c76590e039da75b59acffff0cdb98d34d16d2c93b4da13fb831b" Mar 20 15:41:53 crc kubenswrapper[4813]: I0320 15:41:53.508834 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerStarted","Data":"4177cc3b1bb1d493ec4e172b0295e6941d7f5bdc60870fd4ae3c181ac3f118c9"} Mar 20 15:41:54 crc kubenswrapper[4813]: I0320 15:41:54.892747 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:54 crc kubenswrapper[4813]: I0320 15:41:54.895451 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Mar 20 15:41:54 crc kubenswrapper[4813]: I0320 15:41:54.915321 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e5f925a-75e4-485f-9d5e-2be4c2c13616-metrics-certs\") pod \"network-metrics-daemon-lc5px\" (UID: \"5e5f925a-75e4-485f-9d5e-2be4c2c13616\") " pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:54 crc kubenswrapper[4813]: I0320 15:41:54.994878 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Mar 20 15:41:55 crc kubenswrapper[4813]: I0320 15:41:55.004143 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lc5px" Mar 20 15:41:55 crc kubenswrapper[4813]: I0320 15:41:55.456650 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:55 crc kubenswrapper[4813]: I0320 15:41:55.461065 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:41:55 crc kubenswrapper[4813]: I0320 15:41:55.907405 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-m787j" Mar 20 15:41:59 crc kubenswrapper[4813]: I0320 15:41:59.602800 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rxzdv"] Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.124813 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567022-zfcwm"] Mar 20 15:42:00 crc kubenswrapper[4813]: E0320 15:42:00.125326 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d8518e7-60ec-45a4-96da-b1cea470296a" containerName="pruner" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.125339 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d8518e7-60ec-45a4-96da-b1cea470296a" containerName="pruner" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.125442 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d8518e7-60ec-45a4-96da-b1cea470296a" containerName="pruner" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.125830 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.130041 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.134086 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567022-zfcwm"] Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.274276 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flhpw\" (UniqueName: \"kubernetes.io/projected/9138f2f5-c58b-4256-88e6-d3c52d034cf4-kube-api-access-flhpw\") pod \"auto-csr-approver-29567022-zfcwm\" (UID: \"9138f2f5-c58b-4256-88e6-d3c52d034cf4\") " pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.375762 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flhpw\" (UniqueName: \"kubernetes.io/projected/9138f2f5-c58b-4256-88e6-d3c52d034cf4-kube-api-access-flhpw\") pod \"auto-csr-approver-29567022-zfcwm\" (UID: \"9138f2f5-c58b-4256-88e6-d3c52d034cf4\") " pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.398269 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flhpw\" (UniqueName: \"kubernetes.io/projected/9138f2f5-c58b-4256-88e6-d3c52d034cf4-kube-api-access-flhpw\") pod \"auto-csr-approver-29567022-zfcwm\" (UID: \"9138f2f5-c58b-4256-88e6-d3c52d034cf4\") " pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:00 crc kubenswrapper[4813]: I0320 15:42:00.454654 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:01 crc kubenswrapper[4813]: I0320 15:42:01.701447 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt"] Mar 20 15:42:01 crc kubenswrapper[4813]: I0320 15:42:01.701959 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerName="route-controller-manager" containerID="cri-o://0b3cf944c60f1baad3a2b07e6a65648dbedc5c72063c2c633ea9d5cb339a3694" gracePeriod=30 Mar 20 15:42:01 crc kubenswrapper[4813]: I0320 15:42:01.717325 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd"] Mar 20 15:42:01 crc kubenswrapper[4813]: I0320 15:42:01.717535 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerName="controller-manager" containerID="cri-o://1beff2ebaa265088c62245eb85118aa523a16d0ea80a6c8939693b22efe387ed" gracePeriod=30 Mar 20 15:42:02 crc kubenswrapper[4813]: E0320 15:42:02.380565 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift4/ose-cli:latest" Mar 20 15:42:02 crc kubenswrapper[4813]: E0320 15:42:02.380740 4813 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 15:42:02 crc kubenswrapper[4813]: container &Container{Name:oc,Image:registry.redhat.io/openshift4/ose-cli:latest,Command:[/bin/bash -c oc get csr -o go-template='{{range .items}}{{if not .status}}{{.metadata.name}}{{"\n"}}{{end}}{{end}}' | xargs --no-run-if-empty oc adm certificate approve Mar 20 15:42:02 crc kubenswrapper[4813]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cmkf9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod auto-csr-approver-29567020-4l7qk_openshift-infra(e4d83813-6127-4a79-ad93-bd5cafe64abd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled Mar 20 15:42:02 crc kubenswrapper[4813]: > logger="UnhandledError" Mar 20 15:42:02 crc kubenswrapper[4813]: E0320 15:42:02.381851 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" podUID="e4d83813-6127-4a79-ad93-bd5cafe64abd" Mar 20 15:42:02 crc kubenswrapper[4813]: I0320 15:42:02.558818 4813 generic.go:334] "Generic (PLEG): container finished" 
podID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerID="1beff2ebaa265088c62245eb85118aa523a16d0ea80a6c8939693b22efe387ed" exitCode=0 Mar 20 15:42:02 crc kubenswrapper[4813]: I0320 15:42:02.558906 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" event={"ID":"252b6d65-ea4a-4077-9e41-8a0acc068c13","Type":"ContainerDied","Data":"1beff2ebaa265088c62245eb85118aa523a16d0ea80a6c8939693b22efe387ed"} Mar 20 15:42:02 crc kubenswrapper[4813]: I0320 15:42:02.561018 4813 generic.go:334] "Generic (PLEG): container finished" podID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerID="0b3cf944c60f1baad3a2b07e6a65648dbedc5c72063c2c633ea9d5cb339a3694" exitCode=0 Mar 20 15:42:02 crc kubenswrapper[4813]: I0320 15:42:02.561186 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" event={"ID":"03e80c16-8460-4458-9bf8-af5f1b85e487","Type":"ContainerDied","Data":"0b3cf944c60f1baad3a2b07e6a65648dbedc5c72063c2c633ea9d5cb339a3694"} Mar 20 15:42:02 crc kubenswrapper[4813]: E0320 15:42:02.562637 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift4/ose-cli:latest\\\"\"" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" podUID="e4d83813-6127-4a79-ad93-bd5cafe64abd" Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.052866 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.715162 4813 patch_prober.go:28] interesting pod/controller-manager-7bbdb64b65-vvrmd container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.49:8443/healthz\": dial tcp 10.217.0.49:8443: connect: connection refused" start-of-body= Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.715245 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.49:8443/healthz\": dial tcp 10.217.0.49:8443: connect: connection refused" Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.725855 4813 patch_prober.go:28] interesting pod/route-controller-manager-6bcf54b765-dqlgt container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.50:8443/healthz\": dial tcp 10.217.0.50:8443: connect: connection refused" start-of-body= Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.725940 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.50:8443/healthz\": dial tcp 10.217.0.50:8443: connect: connection refused" Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.843479 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Mar 20 15:42:03 crc kubenswrapper[4813]: I0320 15:42:03.843645 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:42:05 crc kubenswrapper[4813]: I0320 15:42:05.580415 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerStarted","Data":"aa870f66879f1ebab5792790b6f646b17a7d640065e0fb919d999a090e80ae7c"} Mar 20 15:42:08 crc kubenswrapper[4813]: I0320 15:42:08.453807 4813 ???:1] "http: TLS handshake error from 192.168.126.11:33260: no serving certificate available for the kubelet" Mar 20 15:42:10 crc kubenswrapper[4813]: I0320 15:42:10.810307 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-lc5px"] Mar 20 15:42:10 crc kubenswrapper[4813]: I0320 15:42:10.860976 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Mar 20 15:42:14 crc kubenswrapper[4813]: E0320 15:42:14.006870 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Mar 20 15:42:14 crc kubenswrapper[4813]: E0320 15:42:14.007578 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bjnhg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qcm7m_openshift-marketplace(deacbab9-8fbd-4707-8098-5d748d77caae): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 15:42:14 crc kubenswrapper[4813]: E0320 15:42:14.009387 4813 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qcm7m" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" Mar 20 15:42:14 crc kubenswrapper[4813]: I0320 15:42:14.714824 4813 patch_prober.go:28] interesting pod/controller-manager-7bbdb64b65-vvrmd container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.49:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:42:14 crc kubenswrapper[4813]: I0320 15:42:14.715147 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.49:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:42:14 crc kubenswrapper[4813]: I0320 15:42:14.726294 4813 patch_prober.go:28] interesting pod/route-controller-manager-6bcf54b765-dqlgt container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.50:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 15:42:14 crc kubenswrapper[4813]: I0320 15:42:14.726371 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.50:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.547135 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.555299 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.559084 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.559215 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z49s7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-b777z_openshift-marketplace(6f6910f6-780a-428c-a21b-f6702a912af1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.560410 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-b777z" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.589141 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9"] Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.589394 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerName="route-controller-manager" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.589411 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerName="route-controller-manager" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.589420 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerName="controller-manager" Mar 20 
15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.589428 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerName="controller-manager" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.589578 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" containerName="controller-manager" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.589587 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" containerName="route-controller-manager" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.590226 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.617649 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.617808 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7vk4d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wsh8b_openshift-marketplace(6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.618098 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.618173 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2qjfj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4v5fd_openshift-marketplace(ddf7608c-e3c8-40c2-a92e-98605cd5f35a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.619298 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4v5fd" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" Mar 20 15:42:15 crc kubenswrapper[4813]: E0320 15:42:15.619337 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wsh8b" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.619366 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9"] Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.638764 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" event={"ID":"252b6d65-ea4a-4077-9e41-8a0acc068c13","Type":"ContainerDied","Data":"438107a116cf45f72c75bccedec6b49c695ae12ae17223b894face50c6f310c3"} Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.639067 4813 scope.go:117] "RemoveContainer" containerID="1beff2ebaa265088c62245eb85118aa523a16d0ea80a6c8939693b22efe387ed" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.639157 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.641032 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d2241316-a31e-4144-80db-7e75c432c8d1","Type":"ContainerStarted","Data":"1e10597486941ac52b7c772858da49f38674a3ae2dea7f3407ea417de10266dd"} Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.642129 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lc5px" event={"ID":"5e5f925a-75e4-485f-9d5e-2be4c2c13616","Type":"ContainerStarted","Data":"d686d0292b1c4275546118c29e86513f855b91f506c2844b77c40d96d5ca04a9"} Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.644951 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" event={"ID":"03e80c16-8460-4458-9bf8-af5f1b85e487","Type":"ContainerDied","Data":"926faf1b07f0883fa2002671495c2964b5431418585dba81db60a3089b643dd1"} Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.645059 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.726868 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/252b6d65-ea4a-4077-9e41-8a0acc068c13-serving-cert\") pod \"252b6d65-ea4a-4077-9e41-8a0acc068c13\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.726968 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-config\") pod \"03e80c16-8460-4458-9bf8-af5f1b85e487\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.726998 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59cvm\" (UniqueName: \"kubernetes.io/projected/03e80c16-8460-4458-9bf8-af5f1b85e487-kube-api-access-59cvm\") pod \"03e80c16-8460-4458-9bf8-af5f1b85e487\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727016 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03e80c16-8460-4458-9bf8-af5f1b85e487-serving-cert\") pod \"03e80c16-8460-4458-9bf8-af5f1b85e487\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727070 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-client-ca\") pod \"252b6d65-ea4a-4077-9e41-8a0acc068c13\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727101 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f7pm\" (UniqueName: \"kubernetes.io/projected/252b6d65-ea4a-4077-9e41-8a0acc068c13-kube-api-access-7f7pm\") pod \"252b6d65-ea4a-4077-9e41-8a0acc068c13\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727122 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-proxy-ca-bundles\") pod \"252b6d65-ea4a-4077-9e41-8a0acc068c13\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727141 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-client-ca\") pod \"03e80c16-8460-4458-9bf8-af5f1b85e487\" (UID: \"03e80c16-8460-4458-9bf8-af5f1b85e487\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727157 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-config\") pod \"252b6d65-ea4a-4077-9e41-8a0acc068c13\" (UID: \"252b6d65-ea4a-4077-9e41-8a0acc068c13\") " Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727272 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-config\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727301 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fnzt\" (UniqueName: \"kubernetes.io/projected/ccd787bf-c65a-4872-b46d-726bb0f2f690-kube-api-access-2fnzt\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727336 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-client-ca\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.727401 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd787bf-c65a-4872-b46d-726bb0f2f690-serving-cert\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.728464 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-config" (OuterVolumeSpecName: "config") pod "03e80c16-8460-4458-9bf8-af5f1b85e487" (UID: "03e80c16-8460-4458-9bf8-af5f1b85e487"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.728819 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-client-ca" (OuterVolumeSpecName: "client-ca") pod "252b6d65-ea4a-4077-9e41-8a0acc068c13" (UID: "252b6d65-ea4a-4077-9e41-8a0acc068c13"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.728969 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-config" (OuterVolumeSpecName: "config") pod "252b6d65-ea4a-4077-9e41-8a0acc068c13" (UID: "252b6d65-ea4a-4077-9e41-8a0acc068c13"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.729060 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "252b6d65-ea4a-4077-9e41-8a0acc068c13" (UID: "252b6d65-ea4a-4077-9e41-8a0acc068c13"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.729181 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-client-ca" (OuterVolumeSpecName: "client-ca") pod "03e80c16-8460-4458-9bf8-af5f1b85e487" (UID: "03e80c16-8460-4458-9bf8-af5f1b85e487"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.732967 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252b6d65-ea4a-4077-9e41-8a0acc068c13-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "252b6d65-ea4a-4077-9e41-8a0acc068c13" (UID: "252b6d65-ea4a-4077-9e41-8a0acc068c13"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.733291 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03e80c16-8460-4458-9bf8-af5f1b85e487-kube-api-access-59cvm" (OuterVolumeSpecName: "kube-api-access-59cvm") pod "03e80c16-8460-4458-9bf8-af5f1b85e487" (UID: "03e80c16-8460-4458-9bf8-af5f1b85e487"). InnerVolumeSpecName "kube-api-access-59cvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.733386 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e80c16-8460-4458-9bf8-af5f1b85e487-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "03e80c16-8460-4458-9bf8-af5f1b85e487" (UID: "03e80c16-8460-4458-9bf8-af5f1b85e487"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.733828 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252b6d65-ea4a-4077-9e41-8a0acc068c13-kube-api-access-7f7pm" (OuterVolumeSpecName: "kube-api-access-7f7pm") pod "252b6d65-ea4a-4077-9e41-8a0acc068c13" (UID: "252b6d65-ea4a-4077-9e41-8a0acc068c13"). InnerVolumeSpecName "kube-api-access-7f7pm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.828880 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fnzt\" (UniqueName: \"kubernetes.io/projected/ccd787bf-c65a-4872-b46d-726bb0f2f690-kube-api-access-2fnzt\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.828975 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-client-ca\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829086 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd787bf-c65a-4872-b46d-726bb0f2f690-serving-cert\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829130 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-config\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829181 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/252b6d65-ea4a-4077-9e41-8a0acc068c13-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829195 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829208 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59cvm\" (UniqueName: \"kubernetes.io/projected/03e80c16-8460-4458-9bf8-af5f1b85e487-kube-api-access-59cvm\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829222 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03e80c16-8460-4458-9bf8-af5f1b85e487-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829233 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829245 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f7pm\" (UniqueName: \"kubernetes.io/projected/252b6d65-ea4a-4077-9e41-8a0acc068c13-kube-api-access-7f7pm\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829256 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829266 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03e80c16-8460-4458-9bf8-af5f1b85e487-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.829277 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252b6d65-ea4a-4077-9e41-8a0acc068c13-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.830455 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-client-ca\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.830591 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-config\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.837414 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd787bf-c65a-4872-b46d-726bb0f2f690-serving-cert\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.846846 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fnzt\" (UniqueName: \"kubernetes.io/projected/ccd787bf-c65a-4872-b46d-726bb0f2f690-kube-api-access-2fnzt\") pod \"route-controller-manager-68fdbdb4b6-6p2g9\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.909381 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.968805 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd"] Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.972556 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7bbdb64b65-vvrmd"] Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.982062 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt"] Mar 20 15:42:15 crc kubenswrapper[4813]: I0320 15:42:15.985310 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6bcf54b765-dqlgt"] Mar 20 15:42:16 crc kubenswrapper[4813]: I0320 15:42:16.041246 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5xf4w" Mar 20 15:42:16 crc kubenswrapper[4813]: E0320 15:42:16.955920 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qcm7m" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" Mar 20 15:42:16 crc kubenswrapper[4813]: E0320 15:42:16.956294 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wsh8b" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" Mar 20 15:42:16 crc kubenswrapper[4813]: E0320 15:42:16.956395 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4v5fd" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" Mar 20 15:42:16 crc kubenswrapper[4813]: E0320 15:42:16.957328 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-b777z" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" Mar 20 15:42:16 crc kubenswrapper[4813]: I0320 15:42:16.991829 4813 scope.go:117] "RemoveContainer" containerID="0b3cf944c60f1baad3a2b07e6a65648dbedc5c72063c2c633ea9d5cb339a3694" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.222221 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9"] Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.278632 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03e80c16-8460-4458-9bf8-af5f1b85e487" path="/var/lib/kubelet/pods/03e80c16-8460-4458-9bf8-af5f1b85e487/volumes" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.279967 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="252b6d65-ea4a-4077-9e41-8a0acc068c13" 
path="/var/lib/kubelet/pods/252b6d65-ea4a-4077-9e41-8a0acc068c13/volumes" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.389821 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567022-zfcwm"] Mar 20 15:42:17 crc kubenswrapper[4813]: W0320 15:42:17.404563 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9138f2f5_c58b_4256_88e6_d3c52d034cf4.slice/crio-f4d9c456b4c5f977aca5d19587ea5cf5e9ec073a855960ea4c50fa939ecddeec WatchSource:0}: Error finding container f4d9c456b4c5f977aca5d19587ea5cf5e9ec073a855960ea4c50fa939ecddeec: Status 404 returned error can't find the container with id f4d9c456b4c5f977aca5d19587ea5cf5e9ec073a855960ea4c50fa939ecddeec Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.602139 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.603442 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.616253 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.658111 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" event={"ID":"ccd787bf-c65a-4872-b46d-726bb0f2f690","Type":"ContainerStarted","Data":"9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.658155 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" event={"ID":"ccd787bf-c65a-4872-b46d-726bb0f2f690","Type":"ContainerStarted","Data":"2ef69e5701168b142d8f7af017a4790894d79ded395e2ebf701dde755330327a"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.658395 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.664684 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" event={"ID":"e4d83813-6127-4a79-ad93-bd5cafe64abd","Type":"ContainerStarted","Data":"1699ba9b45404496c4aa0835d5b254dda6eb44875089606d85807447862d0bde"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.667819 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d2241316-a31e-4144-80db-7e75c432c8d1","Type":"ContainerStarted","Data":"83a423813ba4f2244516f739a3ff23fa8774840181a083dff00e943255e996b0"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.671106 4813 generic.go:334] "Generic (PLEG): container finished" podID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerID="99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637" exitCode=0 Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.671173 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcdr6" event={"ID":"f499bfc2-7181-42d0-8e36-16bba273f3f5","Type":"ContainerDied","Data":"99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.674537 4813 generic.go:334] 
"Generic (PLEG): container finished" podID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerID="53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb" exitCode=0 Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.674590 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grn6l" event={"ID":"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705","Type":"ContainerDied","Data":"53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.677714 4813 generic.go:334] "Generic (PLEG): container finished" podID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerID="3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941" exitCode=0 Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.677764 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerDied","Data":"3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.682222 4813 generic.go:334] "Generic (PLEG): container finished" podID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerID="adaa775ad9f768fb147508cf639930fd0f92b3899750ab53d433ed7bec87cd95" exitCode=0 Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.682273 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerDied","Data":"adaa775ad9f768fb147508cf639930fd0f92b3899750ab53d433ed7bec87cd95"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.691993 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" event={"ID":"9138f2f5-c58b-4256-88e6-d3c52d034cf4","Type":"ContainerStarted","Data":"f4d9c456b4c5f977aca5d19587ea5cf5e9ec073a855960ea4c50fa939ecddeec"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.697961 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" podStartSLOduration=16.697949711 podStartE2EDuration="16.697949711s" podCreationTimestamp="2026-03-20 15:42:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:17.67831546 +0000 UTC m=+267.101018301" watchObservedRunningTime="2026-03-20 15:42:17.697949711 +0000 UTC m=+267.120652552" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.699088 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lc5px" event={"ID":"5e5f925a-75e4-485f-9d5e-2be4c2c13616","Type":"ContainerStarted","Data":"dcefa0bf1300a46e411d6e733ec7392b0003d8b9d4401e6d9c426c04779c131b"} Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.729458 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" podStartSLOduration=99.20647037 podStartE2EDuration="2m17.729439505s" podCreationTimestamp="2026-03-20 15:40:00 +0000 UTC" firstStartedPulling="2026-03-20 15:41:38.721655116 +0000 UTC m=+228.144357957" lastFinishedPulling="2026-03-20 15:42:17.244624251 +0000 UTC m=+266.667327092" observedRunningTime="2026-03-20 15:42:17.726463426 +0000 UTC m=+267.149166267" watchObservedRunningTime="2026-03-20 15:42:17.729439505 +0000 UTC m=+267.152142346" Mar 20 15:42:17 crc 
kubenswrapper[4813]: I0320 15:42:17.730756 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.754329 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9861ea25-b65a-41f1-940d-b0635cd8cf67-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.754425 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9861ea25-b65a-41f1-940d-b0635cd8cf67-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.779574 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=31.779559603 podStartE2EDuration="31.779559603s" podCreationTimestamp="2026-03-20 15:41:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:17.757321854 +0000 UTC m=+267.180024695" watchObservedRunningTime="2026-03-20 15:42:17.779559603 +0000 UTC m=+267.202262444" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.855496 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9861ea25-b65a-41f1-940d-b0635cd8cf67-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.855586 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9861ea25-b65a-41f1-940d-b0635cd8cf67-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.855651 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9861ea25-b65a-41f1-940d-b0635cd8cf67-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:17 crc kubenswrapper[4813]: I0320 15:42:17.885246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9861ea25-b65a-41f1-940d-b0635cd8cf67-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.028383 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.077621 4813 csr.go:261] certificate signing request csr-tvtg9 is approved, waiting to be issued Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.085468 4813 csr.go:257] certificate signing request csr-tvtg9 is issued Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.260079 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.392164 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-78c6f765dc-6kgbw"] Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.393094 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.395519 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.395761 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.396111 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.396380 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.396426 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.399021 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.403148 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78c6f765dc-6kgbw"] Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.404971 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.565269 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8d62393-40b7-4d86-b32a-840481c379e4-serving-cert\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.565525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-config\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.565845 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-proxy-ca-bundles\") 
pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.565890 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-client-ca\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.565912 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj7xj\" (UniqueName: \"kubernetes.io/projected/e8d62393-40b7-4d86-b32a-840481c379e4-kube-api-access-cj7xj\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.667053 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-proxy-ca-bundles\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.667091 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-client-ca\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.667108 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj7xj\" (UniqueName: \"kubernetes.io/projected/e8d62393-40b7-4d86-b32a-840481c379e4-kube-api-access-cj7xj\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.667144 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8d62393-40b7-4d86-b32a-840481c379e4-serving-cert\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.667178 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-config\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.668410 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-config\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 
15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.668949 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-proxy-ca-bundles\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.669599 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-client-ca\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.672775 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8d62393-40b7-4d86-b32a-840481c379e4-serving-cert\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.684067 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj7xj\" (UniqueName: \"kubernetes.io/projected/e8d62393-40b7-4d86-b32a-840481c379e4-kube-api-access-cj7xj\") pod \"controller-manager-78c6f765dc-6kgbw\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.709727 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grn6l" event={"ID":"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705","Type":"ContainerStarted","Data":"1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.712269 4813 generic.go:334] "Generic (PLEG): container finished" podID="e4d83813-6127-4a79-ad93-bd5cafe64abd" containerID="1699ba9b45404496c4aa0835d5b254dda6eb44875089606d85807447862d0bde" exitCode=0 Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.712341 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" event={"ID":"e4d83813-6127-4a79-ad93-bd5cafe64abd","Type":"ContainerDied","Data":"1699ba9b45404496c4aa0835d5b254dda6eb44875089606d85807447862d0bde"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.714445 4813 generic.go:334] "Generic (PLEG): container finished" podID="d2241316-a31e-4144-80db-7e75c432c8d1" containerID="83a423813ba4f2244516f739a3ff23fa8774840181a083dff00e943255e996b0" exitCode=0 Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.714504 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d2241316-a31e-4144-80db-7e75c432c8d1","Type":"ContainerDied","Data":"83a423813ba4f2244516f739a3ff23fa8774840181a083dff00e943255e996b0"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.716949 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.718328 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lc5px" event={"ID":"5e5f925a-75e4-485f-9d5e-2be4c2c13616","Type":"ContainerStarted","Data":"e3ab434584033777b57861767aa5af02b2fd44962b46637d456d0b632a2905c7"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.725857 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcdr6" event={"ID":"f499bfc2-7181-42d0-8e36-16bba273f3f5","Type":"ContainerStarted","Data":"ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.730677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9861ea25-b65a-41f1-940d-b0635cd8cf67","Type":"ContainerStarted","Data":"f52f685c8ff2a7ce8981dc00242420a6084b93d5cebfa95f6f94d907f9538586"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.730709 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9861ea25-b65a-41f1-940d-b0635cd8cf67","Type":"ContainerStarted","Data":"5153bd5abe22d77175baf0d2f041e9081d969a731197086dfd5bff6c23c46e53"} Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.733040 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-grn6l" podStartSLOduration=9.80090535 podStartE2EDuration="34.733021822s" podCreationTimestamp="2026-03-20 15:41:44 +0000 UTC" firstStartedPulling="2026-03-20 15:41:53.200148655 +0000 UTC m=+242.622851496" lastFinishedPulling="2026-03-20 15:42:18.132265127 +0000 UTC m=+267.554967968" observedRunningTime="2026-03-20 15:42:18.729244422 +0000 UTC m=+268.151947263" watchObservedRunningTime="2026-03-20 15:42:18.733021822 +0000 UTC m=+268.155724663" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.749352 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-lc5px" podStartSLOduration=202.749328824 podStartE2EDuration="3m22.749328824s" podCreationTimestamp="2026-03-20 15:38:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:18.74915791 +0000 UTC m=+268.171860751" watchObservedRunningTime="2026-03-20 15:42:18.749328824 +0000 UTC m=+268.172031665" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.776980 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pcdr6" podStartSLOduration=9.909131417 podStartE2EDuration="34.776958246s" podCreationTimestamp="2026-03-20 15:41:44 +0000 UTC" firstStartedPulling="2026-03-20 15:41:53.200137164 +0000 UTC m=+242.622840035" lastFinishedPulling="2026-03-20 15:42:18.067964023 +0000 UTC m=+267.490666864" observedRunningTime="2026-03-20 15:42:18.773844474 +0000 UTC m=+268.196547315" watchObservedRunningTime="2026-03-20 15:42:18.776958246 +0000 UTC m=+268.199661087" Mar 20 15:42:18 crc kubenswrapper[4813]: I0320 15:42:18.816361 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.8163449489999999 podStartE2EDuration="1.816344949s" podCreationTimestamp="2026-03-20 15:42:17 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:18.815497057 +0000 UTC m=+268.238199928" watchObservedRunningTime="2026-03-20 15:42:18.816344949 +0000 UTC m=+268.239047790" Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.053311 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78c6f765dc-6kgbw"] Mar 20 15:42:19 crc kubenswrapper[4813]: W0320 15:42:19.075818 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8d62393_40b7_4d86_b32a_840481c379e4.slice/crio-7304fb8fb97683335bad51392959fc7594739921e7e9d6d0768d5ab78a259dc8 WatchSource:0}: Error finding container 7304fb8fb97683335bad51392959fc7594739921e7e9d6d0768d5ab78a259dc8: Status 404 returned error can't find the container with id 7304fb8fb97683335bad51392959fc7594739921e7e9d6d0768d5ab78a259dc8 Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.086537 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-11-25 22:53:58.217241212 +0000 UTC Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.086587 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6007h11m39.130657453s for next certificate rotation Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.739622 4813 generic.go:334] "Generic (PLEG): container finished" podID="9861ea25-b65a-41f1-940d-b0635cd8cf67" containerID="f52f685c8ff2a7ce8981dc00242420a6084b93d5cebfa95f6f94d907f9538586" exitCode=0 Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.739762 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9861ea25-b65a-41f1-940d-b0635cd8cf67","Type":"ContainerDied","Data":"f52f685c8ff2a7ce8981dc00242420a6084b93d5cebfa95f6f94d907f9538586"} Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.743187 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" event={"ID":"e8d62393-40b7-4d86-b32a-840481c379e4","Type":"ContainerStarted","Data":"0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0"} Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.743227 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" event={"ID":"e8d62393-40b7-4d86-b32a-840481c379e4","Type":"ContainerStarted","Data":"7304fb8fb97683335bad51392959fc7594739921e7e9d6d0768d5ab78a259dc8"} Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.743710 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.746379 4813 generic.go:334] "Generic (PLEG): container finished" podID="9138f2f5-c58b-4256-88e6-d3c52d034cf4" containerID="8470145f0a3dae96a1d31d937e1c90a87d8dcc8e1bc0eb7995dc2bae0799a2a0" exitCode=0 Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.746570 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" event={"ID":"9138f2f5-c58b-4256-88e6-d3c52d034cf4","Type":"ContainerDied","Data":"8470145f0a3dae96a1d31d937e1c90a87d8dcc8e1bc0eb7995dc2bae0799a2a0"} Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.750795 4813 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:19 crc kubenswrapper[4813]: I0320 15:42:19.780286 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" podStartSLOduration=18.780265656 podStartE2EDuration="18.780265656s" podCreationTimestamp="2026-03-20 15:42:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:19.774450712 +0000 UTC m=+269.197153553" watchObservedRunningTime="2026-03-20 15:42:19.780265656 +0000 UTC m=+269.202968497" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.009748 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.032530 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmkf9\" (UniqueName: \"kubernetes.io/projected/e4d83813-6127-4a79-ad93-bd5cafe64abd-kube-api-access-cmkf9\") pod \"e4d83813-6127-4a79-ad93-bd5cafe64abd\" (UID: \"e4d83813-6127-4a79-ad93-bd5cafe64abd\") " Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.038941 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4d83813-6127-4a79-ad93-bd5cafe64abd-kube-api-access-cmkf9" (OuterVolumeSpecName: "kube-api-access-cmkf9") pod "e4d83813-6127-4a79-ad93-bd5cafe64abd" (UID: "e4d83813-6127-4a79-ad93-bd5cafe64abd"). InnerVolumeSpecName "kube-api-access-cmkf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.086981 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-12-16 11:16:37.822909039 +0000 UTC Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.087012 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6499h34m17.735899766s for next certificate rotation Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.087669 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.133376 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2241316-a31e-4144-80db-7e75c432c8d1-kube-api-access\") pod \"d2241316-a31e-4144-80db-7e75c432c8d1\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.133783 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2241316-a31e-4144-80db-7e75c432c8d1-kubelet-dir\") pod \"d2241316-a31e-4144-80db-7e75c432c8d1\" (UID: \"d2241316-a31e-4144-80db-7e75c432c8d1\") " Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.133927 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d2241316-a31e-4144-80db-7e75c432c8d1-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d2241316-a31e-4144-80db-7e75c432c8d1" (UID: "d2241316-a31e-4144-80db-7e75c432c8d1"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.134180 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmkf9\" (UniqueName: \"kubernetes.io/projected/e4d83813-6127-4a79-ad93-bd5cafe64abd-kube-api-access-cmkf9\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.134210 4813 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2241316-a31e-4144-80db-7e75c432c8d1-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.136581 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2241316-a31e-4144-80db-7e75c432c8d1-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d2241316-a31e-4144-80db-7e75c432c8d1" (UID: "d2241316-a31e-4144-80db-7e75c432c8d1"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.234640 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2241316-a31e-4144-80db-7e75c432c8d1-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.754458 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d2241316-a31e-4144-80db-7e75c432c8d1","Type":"ContainerDied","Data":"1e10597486941ac52b7c772858da49f38674a3ae2dea7f3407ea417de10266dd"} Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.754511 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e10597486941ac52b7c772858da49f38674a3ae2dea7f3407ea417de10266dd" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.754581 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.764531 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.764586 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567020-4l7qk" event={"ID":"e4d83813-6127-4a79-ad93-bd5cafe64abd","Type":"ContainerDied","Data":"2d647d24952c798ff2878fc9bf8974892d134d8924cc3363028ac84d476d58ed"} Mar 20 15:42:20 crc kubenswrapper[4813]: I0320 15:42:20.764635 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d647d24952c798ff2878fc9bf8974892d134d8924cc3363028ac84d476d58ed" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.020824 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.054196 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9861ea25-b65a-41f1-940d-b0635cd8cf67-kube-api-access\") pod \"9861ea25-b65a-41f1-940d-b0635cd8cf67\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.054383 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9861ea25-b65a-41f1-940d-b0635cd8cf67-kubelet-dir\") pod \"9861ea25-b65a-41f1-940d-b0635cd8cf67\" (UID: \"9861ea25-b65a-41f1-940d-b0635cd8cf67\") " Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.054701 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9861ea25-b65a-41f1-940d-b0635cd8cf67-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9861ea25-b65a-41f1-940d-b0635cd8cf67" (UID: "9861ea25-b65a-41f1-940d-b0635cd8cf67"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.060701 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9861ea25-b65a-41f1-940d-b0635cd8cf67-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9861ea25-b65a-41f1-940d-b0635cd8cf67" (UID: "9861ea25-b65a-41f1-940d-b0635cd8cf67"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.082566 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.155555 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flhpw\" (UniqueName: \"kubernetes.io/projected/9138f2f5-c58b-4256-88e6-d3c52d034cf4-kube-api-access-flhpw\") pod \"9138f2f5-c58b-4256-88e6-d3c52d034cf4\" (UID: \"9138f2f5-c58b-4256-88e6-d3c52d034cf4\") " Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.157401 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9861ea25-b65a-41f1-940d-b0635cd8cf67-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.157425 4813 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9861ea25-b65a-41f1-940d-b0635cd8cf67-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.158874 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9138f2f5-c58b-4256-88e6-d3c52d034cf4-kube-api-access-flhpw" (OuterVolumeSpecName: "kube-api-access-flhpw") pod "9138f2f5-c58b-4256-88e6-d3c52d034cf4" (UID: "9138f2f5-c58b-4256-88e6-d3c52d034cf4"). InnerVolumeSpecName "kube-api-access-flhpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.258981 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flhpw\" (UniqueName: \"kubernetes.io/projected/9138f2f5-c58b-4256-88e6-d3c52d034cf4-kube-api-access-flhpw\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.684131 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-78c6f765dc-6kgbw"] Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.786010 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9"] Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.786554 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" podUID="ccd787bf-c65a-4872-b46d-726bb0f2f690" containerName="route-controller-manager" containerID="cri-o://9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898" gracePeriod=30 Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.789216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" event={"ID":"9138f2f5-c58b-4256-88e6-d3c52d034cf4","Type":"ContainerDied","Data":"f4d9c456b4c5f977aca5d19587ea5cf5e9ec073a855960ea4c50fa939ecddeec"} Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.789246 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567022-zfcwm" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.789257 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4d9c456b4c5f977aca5d19587ea5cf5e9ec073a855960ea4c50fa939ecddeec" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.792646 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9861ea25-b65a-41f1-940d-b0635cd8cf67","Type":"ContainerDied","Data":"5153bd5abe22d77175baf0d2f041e9081d969a731197086dfd5bff6c23c46e53"} Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.792875 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5153bd5abe22d77175baf0d2f041e9081d969a731197086dfd5bff6c23c46e53" Mar 20 15:42:21 crc kubenswrapper[4813]: I0320 15:42:21.792700 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.130508 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.171127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-config\") pod \"ccd787bf-c65a-4872-b46d-726bb0f2f690\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.171301 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fnzt\" (UniqueName: \"kubernetes.io/projected/ccd787bf-c65a-4872-b46d-726bb0f2f690-kube-api-access-2fnzt\") pod \"ccd787bf-c65a-4872-b46d-726bb0f2f690\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.171369 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-client-ca\") pod \"ccd787bf-c65a-4872-b46d-726bb0f2f690\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.171474 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd787bf-c65a-4872-b46d-726bb0f2f690-serving-cert\") pod \"ccd787bf-c65a-4872-b46d-726bb0f2f690\" (UID: \"ccd787bf-c65a-4872-b46d-726bb0f2f690\") " Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.172931 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-client-ca" (OuterVolumeSpecName: "client-ca") pod "ccd787bf-c65a-4872-b46d-726bb0f2f690" (UID: "ccd787bf-c65a-4872-b46d-726bb0f2f690"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.173003 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-config" (OuterVolumeSpecName: "config") pod "ccd787bf-c65a-4872-b46d-726bb0f2f690" (UID: "ccd787bf-c65a-4872-b46d-726bb0f2f690"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.178122 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd787bf-c65a-4872-b46d-726bb0f2f690-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ccd787bf-c65a-4872-b46d-726bb0f2f690" (UID: "ccd787bf-c65a-4872-b46d-726bb0f2f690"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.178165 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd787bf-c65a-4872-b46d-726bb0f2f690-kube-api-access-2fnzt" (OuterVolumeSpecName: "kube-api-access-2fnzt") pod "ccd787bf-c65a-4872-b46d-726bb0f2f690" (UID: "ccd787bf-c65a-4872-b46d-726bb0f2f690"). InnerVolumeSpecName "kube-api-access-2fnzt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219044 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Mar 20 15:42:22 crc kubenswrapper[4813]: E0320 15:42:22.219319 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9861ea25-b65a-41f1-940d-b0635cd8cf67" containerName="pruner" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219337 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9861ea25-b65a-41f1-940d-b0635cd8cf67" containerName="pruner" Mar 20 15:42:22 crc kubenswrapper[4813]: E0320 15:42:22.219347 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4d83813-6127-4a79-ad93-bd5cafe64abd" containerName="oc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219353 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4d83813-6127-4a79-ad93-bd5cafe64abd" containerName="oc" Mar 20 15:42:22 crc kubenswrapper[4813]: E0320 15:42:22.219364 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd787bf-c65a-4872-b46d-726bb0f2f690" containerName="route-controller-manager" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219370 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd787bf-c65a-4872-b46d-726bb0f2f690" containerName="route-controller-manager" Mar 20 15:42:22 crc kubenswrapper[4813]: E0320 15:42:22.219385 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9138f2f5-c58b-4256-88e6-d3c52d034cf4" containerName="oc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219391 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9138f2f5-c58b-4256-88e6-d3c52d034cf4" containerName="oc" Mar 20 15:42:22 crc kubenswrapper[4813]: E0320 15:42:22.219403 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2241316-a31e-4144-80db-7e75c432c8d1" containerName="pruner" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219408 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2241316-a31e-4144-80db-7e75c432c8d1" containerName="pruner" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219523 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd787bf-c65a-4872-b46d-726bb0f2f690" containerName="route-controller-manager" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219537 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4d83813-6127-4a79-ad93-bd5cafe64abd" containerName="oc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219552 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2241316-a31e-4144-80db-7e75c432c8d1" containerName="pruner" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219563 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9861ea25-b65a-41f1-940d-b0635cd8cf67" containerName="pruner" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219572 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9138f2f5-c58b-4256-88e6-d3c52d034cf4" containerName="oc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.219983 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.222858 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.223218 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.231100 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.272841 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kube-api-access\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.272878 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-var-lock\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.272925 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.273197 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.273241 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fnzt\" (UniqueName: \"kubernetes.io/projected/ccd787bf-c65a-4872-b46d-726bb0f2f690-kube-api-access-2fnzt\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.273257 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ccd787bf-c65a-4872-b46d-726bb0f2f690-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.273276 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd787bf-c65a-4872-b46d-726bb0f2f690-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.373960 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.374066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-var-lock\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " 
pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.374086 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kube-api-access\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.374094 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.374167 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-var-lock\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.389900 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kube-api-access\") pod \"installer-9-crc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.542050 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.800737 4813 generic.go:334] "Generic (PLEG): container finished" podID="ccd787bf-c65a-4872-b46d-726bb0f2f690" containerID="9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898" exitCode=0 Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.801158 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" podUID="e8d62393-40b7-4d86-b32a-840481c379e4" containerName="controller-manager" containerID="cri-o://0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0" gracePeriod=30 Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.800797 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.800835 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" event={"ID":"ccd787bf-c65a-4872-b46d-726bb0f2f690","Type":"ContainerDied","Data":"9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898"} Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.801558 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9" event={"ID":"ccd787bf-c65a-4872-b46d-726bb0f2f690","Type":"ContainerDied","Data":"2ef69e5701168b142d8f7af017a4790894d79ded395e2ebf701dde755330327a"} Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.801587 4813 scope.go:117] "RemoveContainer" containerID="9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.834405 4813 scope.go:117] "RemoveContainer" containerID="9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898" Mar 20 15:42:22 crc kubenswrapper[4813]: E0320 15:42:22.835433 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898\": container with ID starting with 9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898 not found: ID does not exist" containerID="9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.835471 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898"} err="failed to get container status \"9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898\": rpc error: code = NotFound desc = could not find container \"9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898\": container with ID starting with 9158a48f7f94a6fc605984a73a6a2af11d346f4d0a03048c9d3e1fcb53c7a898 not found: ID does not exist" Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.852286 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9"] Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.858529 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fdbdb4b6-6p2g9"] Mar 20 15:42:22 crc kubenswrapper[4813]: I0320 15:42:22.912304 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Mar 20 15:42:22 crc kubenswrapper[4813]: W0320 15:42:22.921542 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode5b57c2d_92a5_479c_bcb3_5382a04091fc.slice/crio-b13883e861a95f933d0a9fba973bc6663c217ba7ae38025ed8c36e7f1ad59825 WatchSource:0}: Error finding container b13883e861a95f933d0a9fba973bc6663c217ba7ae38025ed8c36e7f1ad59825: Status 404 returned error can't find the container with id b13883e861a95f933d0a9fba973bc6663c217ba7ae38025ed8c36e7f1ad59825 Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.273104 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccd787bf-c65a-4872-b46d-726bb0f2f690" path="/var/lib/kubelet/pods/ccd787bf-c65a-4872-b46d-726bb0f2f690/volumes" Mar 20 
15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.394645 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv"] Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.397092 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.399322 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.400473 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.400540 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.401645 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.401792 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.402583 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.406687 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv"] Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.487714 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-client-ca\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.487753 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czrzr\" (UniqueName: \"kubernetes.io/projected/fd373034-7e5f-48a8-b883-b2e4a58a45da-kube-api-access-czrzr\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.487823 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd373034-7e5f-48a8-b883-b2e4a58a45da-serving-cert\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.487972 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-config\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " 
pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.591273 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-config\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.591370 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-client-ca\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.591400 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czrzr\" (UniqueName: \"kubernetes.io/projected/fd373034-7e5f-48a8-b883-b2e4a58a45da-kube-api-access-czrzr\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.591451 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd373034-7e5f-48a8-b883-b2e4a58a45da-serving-cert\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.592561 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-client-ca\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.593043 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-config\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.607612 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd373034-7e5f-48a8-b883-b2e4a58a45da-serving-cert\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.607700 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czrzr\" (UniqueName: \"kubernetes.io/projected/fd373034-7e5f-48a8-b883-b2e4a58a45da-kube-api-access-czrzr\") pod \"route-controller-manager-754b5d84d9-4nqtv\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc 
kubenswrapper[4813]: I0320 15:42:23.709684 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.726728 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.793621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cj7xj\" (UniqueName: \"kubernetes.io/projected/e8d62393-40b7-4d86-b32a-840481c379e4-kube-api-access-cj7xj\") pod \"e8d62393-40b7-4d86-b32a-840481c379e4\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.793674 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-config\") pod \"e8d62393-40b7-4d86-b32a-840481c379e4\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.793754 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8d62393-40b7-4d86-b32a-840481c379e4-serving-cert\") pod \"e8d62393-40b7-4d86-b32a-840481c379e4\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.793783 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-client-ca\") pod \"e8d62393-40b7-4d86-b32a-840481c379e4\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.793808 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-proxy-ca-bundles\") pod \"e8d62393-40b7-4d86-b32a-840481c379e4\" (UID: \"e8d62393-40b7-4d86-b32a-840481c379e4\") " Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.794380 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-client-ca" (OuterVolumeSpecName: "client-ca") pod "e8d62393-40b7-4d86-b32a-840481c379e4" (UID: "e8d62393-40b7-4d86-b32a-840481c379e4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.794592 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-config" (OuterVolumeSpecName: "config") pod "e8d62393-40b7-4d86-b32a-840481c379e4" (UID: "e8d62393-40b7-4d86-b32a-840481c379e4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.795251 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e8d62393-40b7-4d86-b32a-840481c379e4" (UID: "e8d62393-40b7-4d86-b32a-840481c379e4"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.795541 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.795555 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.795564 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8d62393-40b7-4d86-b32a-840481c379e4-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.797214 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8d62393-40b7-4d86-b32a-840481c379e4-kube-api-access-cj7xj" (OuterVolumeSpecName: "kube-api-access-cj7xj") pod "e8d62393-40b7-4d86-b32a-840481c379e4" (UID: "e8d62393-40b7-4d86-b32a-840481c379e4"). InnerVolumeSpecName "kube-api-access-cj7xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.798295 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8d62393-40b7-4d86-b32a-840481c379e4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e8d62393-40b7-4d86-b32a-840481c379e4" (UID: "e8d62393-40b7-4d86-b32a-840481c379e4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.811791 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e5b57c2d-92a5-479c-bcb3-5382a04091fc","Type":"ContainerStarted","Data":"e145ec28df6cda1f111c2745fdfcd86c4d33d2fdcf2d6113a48957f7ef2830c5"} Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.811827 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e5b57c2d-92a5-479c-bcb3-5382a04091fc","Type":"ContainerStarted","Data":"b13883e861a95f933d0a9fba973bc6663c217ba7ae38025ed8c36e7f1ad59825"} Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.816545 4813 generic.go:334] "Generic (PLEG): container finished" podID="e8d62393-40b7-4d86-b32a-840481c379e4" containerID="0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0" exitCode=0 Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.816650 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.816763 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" event={"ID":"e8d62393-40b7-4d86-b32a-840481c379e4","Type":"ContainerDied","Data":"0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0"} Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.816824 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78c6f765dc-6kgbw" event={"ID":"e8d62393-40b7-4d86-b32a-840481c379e4","Type":"ContainerDied","Data":"7304fb8fb97683335bad51392959fc7594739921e7e9d6d0768d5ab78a259dc8"} Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.816846 4813 scope.go:117] "RemoveContainer" containerID="0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.829681 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.829659633 podStartE2EDuration="1.829659633s" podCreationTimestamp="2026-03-20 15:42:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:23.826049848 +0000 UTC m=+273.248752689" watchObservedRunningTime="2026-03-20 15:42:23.829659633 +0000 UTC m=+273.252362474" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.846358 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-78c6f765dc-6kgbw"] Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.850133 4813 scope.go:117] "RemoveContainer" containerID="0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.850140 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-78c6f765dc-6kgbw"] Mar 20 15:42:23 crc kubenswrapper[4813]: E0320 15:42:23.850581 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0\": container with ID starting with 0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0 not found: ID does not exist" containerID="0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.850630 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0"} err="failed to get container status \"0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0\": rpc error: code = NotFound desc = could not find container \"0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0\": container with ID starting with 0f96aaced8d7c0bfa55d3677bf4a11994f10896109d1737106d98e32758971e0 not found: ID does not exist" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.897018 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cj7xj\" (UniqueName: \"kubernetes.io/projected/e8d62393-40b7-4d86-b32a-840481c379e4-kube-api-access-cj7xj\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.897089 4813 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8d62393-40b7-4d86-b32a-840481c379e4-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:23 crc kubenswrapper[4813]: I0320 15:42:23.906433 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv"] Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.795200 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.796161 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.835873 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" event={"ID":"fd373034-7e5f-48a8-b883-b2e4a58a45da","Type":"ContainerStarted","Data":"3f79864bdcf199d27231e0a5a491ebd440e9a90c5219acbe9755425c14c9c598"} Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.836146 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" event={"ID":"fd373034-7e5f-48a8-b883-b2e4a58a45da","Type":"ContainerStarted","Data":"6ed8e9e68147f42171c47c0f952d925455ad2f2d0c400e7ed80c8c01894f04b5"} Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.836170 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.840899 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.873355 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" podStartSLOduration=3.873303891 podStartE2EDuration="3.873303891s" podCreationTimestamp="2026-03-20 15:42:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:24.853918587 +0000 UTC m=+274.276621438" watchObservedRunningTime="2026-03-20 15:42:24.873303891 +0000 UTC m=+274.296006732" Mar 20 15:42:24 crc kubenswrapper[4813]: I0320 15:42:24.965661 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:42:25 crc kubenswrapper[4813]: I0320 15:42:25.190358 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:42:25 crc kubenswrapper[4813]: I0320 15:42:25.191513 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.229341 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.272981 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8d62393-40b7-4d86-b32a-840481c379e4" path="/var/lib/kubelet/pods/e8d62393-40b7-4d86-b32a-840481c379e4/volumes" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.398536 4813 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-8bb745455-qql7f"] Mar 20 15:42:26 crc kubenswrapper[4813]: E0320 15:42:25.398782 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8d62393-40b7-4d86-b32a-840481c379e4" containerName="controller-manager" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.398798 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8d62393-40b7-4d86-b32a-840481c379e4" containerName="controller-manager" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.398916 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8d62393-40b7-4d86-b32a-840481c379e4" containerName="controller-manager" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.399355 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.405343 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.405408 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.438693 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.441325 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.441395 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.441450 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.441597 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.445321 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8bb745455-qql7f"] Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.538846 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbjs9\" (UniqueName: \"kubernetes.io/projected/59f6ed7a-30de-4a22-a639-4276ccdb02e5-kube-api-access-vbjs9\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.538906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-client-ca\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.538962 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/59f6ed7a-30de-4a22-a639-4276ccdb02e5-serving-cert\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.538990 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-proxy-ca-bundles\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.539039 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-config\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.639959 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbjs9\" (UniqueName: \"kubernetes.io/projected/59f6ed7a-30de-4a22-a639-4276ccdb02e5-kube-api-access-vbjs9\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.640057 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-client-ca\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.640087 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f6ed7a-30de-4a22-a639-4276ccdb02e5-serving-cert\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.640125 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-proxy-ca-bundles\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.640164 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-config\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.641134 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-client-ca\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " 
pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.641854 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-config\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.643145 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-proxy-ca-bundles\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.645530 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f6ed7a-30de-4a22-a639-4276ccdb02e5-serving-cert\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.656631 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbjs9\" (UniqueName: \"kubernetes.io/projected/59f6ed7a-30de-4a22-a639-4276ccdb02e5-kube-api-access-vbjs9\") pod \"controller-manager-8bb745455-qql7f\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.755441 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.881709 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:25.884635 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:42:26 crc kubenswrapper[4813]: I0320 15:42:26.196968 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcdr6"] Mar 20 15:42:27 crc kubenswrapper[4813]: I0320 15:42:27.848908 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pcdr6" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="registry-server" containerID="cri-o://ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305" gracePeriod=2 Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.430847 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8bb745455-qql7f"] Mar 20 15:42:28 crc kubenswrapper[4813]: W0320 15:42:28.441961 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59f6ed7a_30de_4a22_a639_4276ccdb02e5.slice/crio-8c468186d0de64548c093097d40045e6c1a39bb3d8b2acb503d21fe57b78247b WatchSource:0}: Error finding container 8c468186d0de64548c093097d40045e6c1a39bb3d8b2acb503d21fe57b78247b: Status 404 returned error can't find the container with id 8c468186d0de64548c093097d40045e6c1a39bb3d8b2acb503d21fe57b78247b Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.483426 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.573934 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-catalog-content\") pod \"f499bfc2-7181-42d0-8e36-16bba273f3f5\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.574377 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zcht\" (UniqueName: \"kubernetes.io/projected/f499bfc2-7181-42d0-8e36-16bba273f3f5-kube-api-access-2zcht\") pod \"f499bfc2-7181-42d0-8e36-16bba273f3f5\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.574409 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-utilities\") pod \"f499bfc2-7181-42d0-8e36-16bba273f3f5\" (UID: \"f499bfc2-7181-42d0-8e36-16bba273f3f5\") " Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.575124 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-utilities" (OuterVolumeSpecName: "utilities") pod "f499bfc2-7181-42d0-8e36-16bba273f3f5" (UID: "f499bfc2-7181-42d0-8e36-16bba273f3f5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.578688 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f499bfc2-7181-42d0-8e36-16bba273f3f5-kube-api-access-2zcht" (OuterVolumeSpecName: "kube-api-access-2zcht") pod "f499bfc2-7181-42d0-8e36-16bba273f3f5" (UID: "f499bfc2-7181-42d0-8e36-16bba273f3f5"). InnerVolumeSpecName "kube-api-access-2zcht". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.676386 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zcht\" (UniqueName: \"kubernetes.io/projected/f499bfc2-7181-42d0-8e36-16bba273f3f5-kube-api-access-2zcht\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.676421 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.855294 4813 generic.go:334] "Generic (PLEG): container finished" podID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerID="ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305" exitCode=0 Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.855350 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcdr6" event={"ID":"f499bfc2-7181-42d0-8e36-16bba273f3f5","Type":"ContainerDied","Data":"ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305"} Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.855374 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcdr6" event={"ID":"f499bfc2-7181-42d0-8e36-16bba273f3f5","Type":"ContainerDied","Data":"873a2c5597ebdf9708d12b96fa55a3fde5f66edc627c36522dbe076f6cb09cab"} Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.855391 4813 scope.go:117] "RemoveContainer" containerID="ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.855394 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcdr6" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.860170 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" event={"ID":"59f6ed7a-30de-4a22-a639-4276ccdb02e5","Type":"ContainerStarted","Data":"a28889b5c14bdb28d1590d9dfe63e862a27fc558eb6cf8d4401543e64410fad4"} Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.860211 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" event={"ID":"59f6ed7a-30de-4a22-a639-4276ccdb02e5","Type":"ContainerStarted","Data":"8c468186d0de64548c093097d40045e6c1a39bb3d8b2acb503d21fe57b78247b"} Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.862972 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerStarted","Data":"c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937"} Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.864465 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerStarted","Data":"801ca882191d62f73f5652a6fbdbe21acdae302746fc657ab4a0a81a8fb0daa9"} Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.883708 4813 scope.go:117] "RemoveContainer" containerID="99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.912801 4813 scope.go:117] "RemoveContainer" containerID="5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.931829 4813 scope.go:117] "RemoveContainer" containerID="ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305" Mar 20 15:42:28 crc kubenswrapper[4813]: E0320 15:42:28.932401 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305\": container with ID starting with ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305 not found: ID does not exist" containerID="ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.932477 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305"} err="failed to get container status \"ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305\": rpc error: code = NotFound desc = could not find container \"ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305\": container with ID starting with ce59682a491201025d529f7df1d3732b373a4fdb25ad6feb24a0be974f35e305 not found: ID does not exist" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.932535 4813 scope.go:117] "RemoveContainer" containerID="99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637" Mar 20 15:42:28 crc kubenswrapper[4813]: E0320 15:42:28.932984 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637\": container with ID starting with 99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637 not found: ID does not 
exist" containerID="99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.933017 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637"} err="failed to get container status \"99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637\": rpc error: code = NotFound desc = could not find container \"99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637\": container with ID starting with 99ece2b86c99053d12a2daa8487470d7d4cc1ad9b3010d2e01673dca78013637 not found: ID does not exist" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.933038 4813 scope.go:117] "RemoveContainer" containerID="5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1" Mar 20 15:42:28 crc kubenswrapper[4813]: E0320 15:42:28.933386 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1\": container with ID starting with 5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1 not found: ID does not exist" containerID="5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1" Mar 20 15:42:28 crc kubenswrapper[4813]: I0320 15:42:28.933430 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1"} err="failed to get container status \"5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1\": rpc error: code = NotFound desc = could not find container \"5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1\": container with ID starting with 5b5f395437327413215c220be7f6a3d5e0b34a712336311b36cc2e99c94dc3c1 not found: ID does not exist" Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.360492 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f499bfc2-7181-42d0-8e36-16bba273f3f5" (UID: "f499bfc2-7181-42d0-8e36-16bba273f3f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.385967 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f499bfc2-7181-42d0-8e36-16bba273f3f5-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.480249 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcdr6"] Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.487680 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcdr6"] Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.872194 4813 generic.go:334] "Generic (PLEG): container finished" podID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerID="c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937" exitCode=0 Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.872240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerDied","Data":"c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937"} Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.875654 4813 generic.go:334] "Generic (PLEG): container finished" podID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerID="801ca882191d62f73f5652a6fbdbe21acdae302746fc657ab4a0a81a8fb0daa9" exitCode=0 Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.876373 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerDied","Data":"801ca882191d62f73f5652a6fbdbe21acdae302746fc657ab4a0a81a8fb0daa9"} Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.876418 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.885025 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:29 crc kubenswrapper[4813]: I0320 15:42:29.938135 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" podStartSLOduration=8.938116218 podStartE2EDuration="8.938116218s" podCreationTimestamp="2026-03-20 15:42:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:29.927348333 +0000 UTC m=+279.350051194" watchObservedRunningTime="2026-03-20 15:42:29.938116218 +0000 UTC m=+279.360819059" Mar 20 15:42:31 crc kubenswrapper[4813]: I0320 15:42:31.280025 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" path="/var/lib/kubelet/pods/f499bfc2-7181-42d0-8e36-16bba273f3f5/volumes" Mar 20 15:42:33 crc kubenswrapper[4813]: I0320 15:42:33.842383 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:42:33 crc kubenswrapper[4813]: I0320 15:42:33.842726 4813 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:42:37 crc kubenswrapper[4813]: I0320 15:42:37.943012 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerStarted","Data":"f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b"} Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.953244 4813 generic.go:334] "Generic (PLEG): container finished" podID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerID="f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b" exitCode=0 Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.953313 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerDied","Data":"f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b"} Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.955499 4813 generic.go:334] "Generic (PLEG): container finished" podID="deacbab9-8fbd-4707-8098-5d748d77caae" containerID="6b23d32999057bc85298799baa3dda74e73cc4e699811b701fc53821d7bb57d2" exitCode=0 Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.955515 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcm7m" event={"ID":"deacbab9-8fbd-4707-8098-5d748d77caae","Type":"ContainerDied","Data":"6b23d32999057bc85298799baa3dda74e73cc4e699811b701fc53821d7bb57d2"} Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.963662 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerStarted","Data":"fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb"} Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.966376 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerStarted","Data":"27979f5c493e980100999afa9bca3ec362f55ab8999cad5bcb913459812dd4cb"} Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.969184 4813 generic.go:334] "Generic (PLEG): container finished" podID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerID="fffe450bf0afe561408acbf42de5f1e23a0c43f43aff7b4c61f0e0b87bd37e52" exitCode=0 Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.969243 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4v5fd" event={"ID":"ddf7608c-e3c8-40c2-a92e-98605cd5f35a","Type":"ContainerDied","Data":"fffe450bf0afe561408acbf42de5f1e23a0c43f43aff7b4c61f0e0b87bd37e52"} Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.972526 4813 generic.go:334] "Generic (PLEG): container finished" podID="6f6910f6-780a-428c-a21b-f6702a912af1" containerID="b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b" exitCode=0 Mar 20 15:42:38 crc kubenswrapper[4813]: I0320 15:42:38.972564 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b777z" event={"ID":"6f6910f6-780a-428c-a21b-f6702a912af1","Type":"ContainerDied","Data":"b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b"} Mar 20 
15:42:39 crc kubenswrapper[4813]: I0320 15:42:39.030604 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rxzdv" podStartSLOduration=33.521483977 podStartE2EDuration="54.030583135s" podCreationTimestamp="2026-03-20 15:41:45 +0000 UTC" firstStartedPulling="2026-03-20 15:42:17.690689408 +0000 UTC m=+267.113392249" lastFinishedPulling="2026-03-20 15:42:38.199788566 +0000 UTC m=+287.622491407" observedRunningTime="2026-03-20 15:42:39.029177808 +0000 UTC m=+288.451880659" watchObservedRunningTime="2026-03-20 15:42:39.030583135 +0000 UTC m=+288.453285996" Mar 20 15:42:39 crc kubenswrapper[4813]: I0320 15:42:39.071768 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wv9qh" podStartSLOduration=34.443413221 podStartE2EDuration="54.071753166s" podCreationTimestamp="2026-03-20 15:41:45 +0000 UTC" firstStartedPulling="2026-03-20 15:42:17.67907015 +0000 UTC m=+267.101772991" lastFinishedPulling="2026-03-20 15:42:37.307410095 +0000 UTC m=+286.730112936" observedRunningTime="2026-03-20 15:42:39.069702211 +0000 UTC m=+288.492405052" watchObservedRunningTime="2026-03-20 15:42:39.071753166 +0000 UTC m=+288.494456007" Mar 20 15:42:39 crc kubenswrapper[4813]: I0320 15:42:39.984233 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4v5fd" event={"ID":"ddf7608c-e3c8-40c2-a92e-98605cd5f35a","Type":"ContainerStarted","Data":"4c73b30703cdeae11d7cbf8e5128667ad84ba59384919f73dc2bd738ee5ce861"} Mar 20 15:42:39 crc kubenswrapper[4813]: I0320 15:42:39.986551 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b777z" event={"ID":"6f6910f6-780a-428c-a21b-f6702a912af1","Type":"ContainerStarted","Data":"42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4"} Mar 20 15:42:40 crc kubenswrapper[4813]: I0320 15:42:40.038302 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerStarted","Data":"dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979"} Mar 20 15:42:40 crc kubenswrapper[4813]: I0320 15:42:40.078504 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4v5fd" podStartSLOduration=2.789795314 podStartE2EDuration="58.078452375s" podCreationTimestamp="2026-03-20 15:41:42 +0000 UTC" firstStartedPulling="2026-03-20 15:41:44.281961031 +0000 UTC m=+233.704663872" lastFinishedPulling="2026-03-20 15:42:39.570618092 +0000 UTC m=+288.993320933" observedRunningTime="2026-03-20 15:42:40.050248008 +0000 UTC m=+289.472950849" watchObservedRunningTime="2026-03-20 15:42:40.078452375 +0000 UTC m=+289.501155216" Mar 20 15:42:40 crc kubenswrapper[4813]: I0320 15:42:40.083078 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b777z" podStartSLOduration=1.635928461 podStartE2EDuration="58.083061747s" podCreationTimestamp="2026-03-20 15:41:42 +0000 UTC" firstStartedPulling="2026-03-20 15:41:43.228530129 +0000 UTC m=+232.651232970" lastFinishedPulling="2026-03-20 15:42:39.675663415 +0000 UTC m=+289.098366256" observedRunningTime="2026-03-20 15:42:40.076174445 +0000 UTC m=+289.498877286" watchObservedRunningTime="2026-03-20 15:42:40.083061747 +0000 UTC m=+289.505764668" Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.048852 
4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcm7m" event={"ID":"deacbab9-8fbd-4707-8098-5d748d77caae","Type":"ContainerStarted","Data":"188e1ef66c7c337c50af03613a24b1b174fb2329d5352a64e741b772ba608ef6"} Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.063513 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wsh8b" podStartSLOduration=3.941668882 podStartE2EDuration="59.063494271s" podCreationTimestamp="2026-03-20 15:41:42 +0000 UTC" firstStartedPulling="2026-03-20 15:41:44.341687595 +0000 UTC m=+233.764390436" lastFinishedPulling="2026-03-20 15:42:39.463512984 +0000 UTC m=+288.886215825" observedRunningTime="2026-03-20 15:42:40.101830915 +0000 UTC m=+289.524533756" watchObservedRunningTime="2026-03-20 15:42:41.063494271 +0000 UTC m=+290.486197112" Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.063997 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qcm7m" podStartSLOduration=3.520918875 podStartE2EDuration="59.063990744s" podCreationTimestamp="2026-03-20 15:41:42 +0000 UTC" firstStartedPulling="2026-03-20 15:41:44.26600559 +0000 UTC m=+233.688708441" lastFinishedPulling="2026-03-20 15:42:39.809077469 +0000 UTC m=+289.231780310" observedRunningTime="2026-03-20 15:42:41.062781032 +0000 UTC m=+290.485483893" watchObservedRunningTime="2026-03-20 15:42:41.063990744 +0000 UTC m=+290.486693585" Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.738882 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8bb745455-qql7f"] Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.739308 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" podUID="59f6ed7a-30de-4a22-a639-4276ccdb02e5" containerName="controller-manager" containerID="cri-o://a28889b5c14bdb28d1590d9dfe63e862a27fc558eb6cf8d4401543e64410fad4" gracePeriod=30 Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.752784 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv"] Mar 20 15:42:41 crc kubenswrapper[4813]: I0320 15:42:41.752988 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" podUID="fd373034-7e5f-48a8-b883-b2e4a58a45da" containerName="route-controller-manager" containerID="cri-o://3f79864bdcf199d27231e0a5a491ebd440e9a90c5219acbe9755425c14c9c598" gracePeriod=30 Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.054879 4813 generic.go:334] "Generic (PLEG): container finished" podID="fd373034-7e5f-48a8-b883-b2e4a58a45da" containerID="3f79864bdcf199d27231e0a5a491ebd440e9a90c5219acbe9755425c14c9c598" exitCode=0 Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.054942 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" event={"ID":"fd373034-7e5f-48a8-b883-b2e4a58a45da","Type":"ContainerDied","Data":"3f79864bdcf199d27231e0a5a491ebd440e9a90c5219acbe9755425c14c9c598"} Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.056244 4813 generic.go:334] "Generic (PLEG): container finished" podID="59f6ed7a-30de-4a22-a639-4276ccdb02e5" 
containerID="a28889b5c14bdb28d1590d9dfe63e862a27fc558eb6cf8d4401543e64410fad4" exitCode=0 Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.056297 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" event={"ID":"59f6ed7a-30de-4a22-a639-4276ccdb02e5","Type":"ContainerDied","Data":"a28889b5c14bdb28d1590d9dfe63e862a27fc558eb6cf8d4401543e64410fad4"} Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.318557 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.351510 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.473297 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbjs9\" (UniqueName: \"kubernetes.io/projected/59f6ed7a-30de-4a22-a639-4276ccdb02e5-kube-api-access-vbjs9\") pod \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.473338 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-config\") pod \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.473380 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f6ed7a-30de-4a22-a639-4276ccdb02e5-serving-cert\") pod \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.473405 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd373034-7e5f-48a8-b883-b2e4a58a45da-serving-cert\") pod \"fd373034-7e5f-48a8-b883-b2e4a58a45da\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.473429 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-config\") pod \"fd373034-7e5f-48a8-b883-b2e4a58a45da\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.473448 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-client-ca\") pod \"fd373034-7e5f-48a8-b883-b2e4a58a45da\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474102 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-client-ca" (OuterVolumeSpecName: "client-ca") pod "fd373034-7e5f-48a8-b883-b2e4a58a45da" (UID: "fd373034-7e5f-48a8-b883-b2e4a58a45da"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474179 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-config" (OuterVolumeSpecName: "config") pod "fd373034-7e5f-48a8-b883-b2e4a58a45da" (UID: "fd373034-7e5f-48a8-b883-b2e4a58a45da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474232 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czrzr\" (UniqueName: \"kubernetes.io/projected/fd373034-7e5f-48a8-b883-b2e4a58a45da-kube-api-access-czrzr\") pod \"fd373034-7e5f-48a8-b883-b2e4a58a45da\" (UID: \"fd373034-7e5f-48a8-b883-b2e4a58a45da\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474341 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-proxy-ca-bundles\") pod \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474609 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-config" (OuterVolumeSpecName: "config") pod "59f6ed7a-30de-4a22-a639-4276ccdb02e5" (UID: "59f6ed7a-30de-4a22-a639-4276ccdb02e5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474713 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "59f6ed7a-30de-4a22-a639-4276ccdb02e5" (UID: "59f6ed7a-30de-4a22-a639-4276ccdb02e5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.474787 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-client-ca\") pod \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\" (UID: \"59f6ed7a-30de-4a22-a639-4276ccdb02e5\") " Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.475128 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-client-ca" (OuterVolumeSpecName: "client-ca") pod "59f6ed7a-30de-4a22-a639-4276ccdb02e5" (UID: "59f6ed7a-30de-4a22-a639-4276ccdb02e5"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.475443 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.475458 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.475471 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f6ed7a-30de-4a22-a639-4276ccdb02e5-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.475505 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.475518 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fd373034-7e5f-48a8-b883-b2e4a58a45da-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.478425 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f6ed7a-30de-4a22-a639-4276ccdb02e5-kube-api-access-vbjs9" (OuterVolumeSpecName: "kube-api-access-vbjs9") pod "59f6ed7a-30de-4a22-a639-4276ccdb02e5" (UID: "59f6ed7a-30de-4a22-a639-4276ccdb02e5"). InnerVolumeSpecName "kube-api-access-vbjs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.478551 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd373034-7e5f-48a8-b883-b2e4a58a45da-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fd373034-7e5f-48a8-b883-b2e4a58a45da" (UID: "fd373034-7e5f-48a8-b883-b2e4a58a45da"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.478572 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59f6ed7a-30de-4a22-a639-4276ccdb02e5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "59f6ed7a-30de-4a22-a639-4276ccdb02e5" (UID: "59f6ed7a-30de-4a22-a639-4276ccdb02e5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.478683 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd373034-7e5f-48a8-b883-b2e4a58a45da-kube-api-access-czrzr" (OuterVolumeSpecName: "kube-api-access-czrzr") pod "fd373034-7e5f-48a8-b883-b2e4a58a45da" (UID: "fd373034-7e5f-48a8-b883-b2e4a58a45da"). InnerVolumeSpecName "kube-api-access-czrzr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.577522 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbjs9\" (UniqueName: \"kubernetes.io/projected/59f6ed7a-30de-4a22-a639-4276ccdb02e5-kube-api-access-vbjs9\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.577585 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f6ed7a-30de-4a22-a639-4276ccdb02e5-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.577604 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd373034-7e5f-48a8-b883-b2e4a58a45da-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.577623 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czrzr\" (UniqueName: \"kubernetes.io/projected/fd373034-7e5f-48a8-b883-b2e4a58a45da-kube-api-access-czrzr\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.647468 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.647552 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.691729 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.803603 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.803653 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.869427 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.996510 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:42:42 crc kubenswrapper[4813]: I0320 15:42:42.996572 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.048142 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.064199 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" event={"ID":"fd373034-7e5f-48a8-b883-b2e4a58a45da","Type":"ContainerDied","Data":"6ed8e9e68147f42171c47c0f952d925455ad2f2d0c400e7ed80c8c01894f04b5"} Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.064845 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.065185 4813 scope.go:117] "RemoveContainer" containerID="3f79864bdcf199d27231e0a5a491ebd440e9a90c5219acbe9755425c14c9c598" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.092619 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" event={"ID":"59f6ed7a-30de-4a22-a639-4276ccdb02e5","Type":"ContainerDied","Data":"8c468186d0de64548c093097d40045e6c1a39bb3d8b2acb503d21fe57b78247b"} Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.092729 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8bb745455-qql7f" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.099856 4813 scope.go:117] "RemoveContainer" containerID="a28889b5c14bdb28d1590d9dfe63e862a27fc558eb6cf8d4401543e64410fad4" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.116887 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.129162 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754b5d84d9-4nqtv"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.134062 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8bb745455-qql7f"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.137820 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-8bb745455-qql7f"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.221787 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.221848 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.274156 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59f6ed7a-30de-4a22-a639-4276ccdb02e5" path="/var/lib/kubelet/pods/59f6ed7a-30de-4a22-a639-4276ccdb02e5/volumes" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.276586 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd373034-7e5f-48a8-b883-b2e4a58a45da" path="/var/lib/kubelet/pods/fd373034-7e5f-48a8-b883-b2e4a58a45da/volumes" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.286560 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.416466 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f59db95fb-jt24h"] Mar 20 15:42:43 crc kubenswrapper[4813]: E0320 15:42:43.416763 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd373034-7e5f-48a8-b883-b2e4a58a45da" containerName="route-controller-manager" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.416816 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd373034-7e5f-48a8-b883-b2e4a58a45da" containerName="route-controller-manager" Mar 20 15:42:43 crc kubenswrapper[4813]: E0320 15:42:43.416838 4813 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="extract-content" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.416849 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="extract-content" Mar 20 15:42:43 crc kubenswrapper[4813]: E0320 15:42:43.416862 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="extract-utilities" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.416882 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="extract-utilities" Mar 20 15:42:43 crc kubenswrapper[4813]: E0320 15:42:43.416897 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f6ed7a-30de-4a22-a639-4276ccdb02e5" containerName="controller-manager" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.416907 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f6ed7a-30de-4a22-a639-4276ccdb02e5" containerName="controller-manager" Mar 20 15:42:43 crc kubenswrapper[4813]: E0320 15:42:43.416927 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="registry-server" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.416936 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="registry-server" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.417079 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd373034-7e5f-48a8-b883-b2e4a58a45da" containerName="route-controller-manager" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.417099 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="59f6ed7a-30de-4a22-a639-4276ccdb02e5" containerName="controller-manager" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.417115 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f499bfc2-7181-42d0-8e36-16bba273f3f5" containerName="registry-server" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.417541 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.420959 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.421254 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.422164 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.422220 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.422448 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.423427 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.423689 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.428274 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.428815 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.430470 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.431920 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.432261 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.432337 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.432897 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.434367 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f59db95fb-jt24h"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.434931 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.439033 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl"] Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496016 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-client-ca\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496065 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-client-ca\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: 
\"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496085 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b602981-d371-41c8-a445-8cb07dc1553e-serving-cert\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-proxy-ca-bundles\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496290 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-config\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496455 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ks2q\" (UniqueName: \"kubernetes.io/projected/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-kube-api-access-5ks2q\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496481 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-serving-cert\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496526 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvj6c\" (UniqueName: \"kubernetes.io/projected/2b602981-d371-41c8-a445-8cb07dc1553e-kube-api-access-kvj6c\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.496567 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-config\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597703 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ks2q\" (UniqueName: \"kubernetes.io/projected/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-kube-api-access-5ks2q\") pod 
\"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597748 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-serving-cert\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597780 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvj6c\" (UniqueName: \"kubernetes.io/projected/2b602981-d371-41c8-a445-8cb07dc1553e-kube-api-access-kvj6c\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597811 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-config\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597835 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-client-ca\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597865 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-client-ca\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597890 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b602981-d371-41c8-a445-8cb07dc1553e-serving-cert\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597919 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-proxy-ca-bundles\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.597986 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-config\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " 
pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.599592 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-client-ca\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.599630 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-config\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.600215 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-proxy-ca-bundles\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.600238 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-client-ca\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.600626 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-config\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.605215 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b602981-d371-41c8-a445-8cb07dc1553e-serving-cert\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.605318 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-serving-cert\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.621761 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvj6c\" (UniqueName: \"kubernetes.io/projected/2b602981-d371-41c8-a445-8cb07dc1553e-kube-api-access-kvj6c\") pod \"route-controller-manager-744645547b-cgfcl\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.624475 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5ks2q\" (UniqueName: \"kubernetes.io/projected/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-kube-api-access-5ks2q\") pod \"controller-manager-7f59db95fb-jt24h\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.738833 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.745507 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:43 crc kubenswrapper[4813]: I0320 15:42:43.986915 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl"] Mar 20 15:42:44 crc kubenswrapper[4813]: I0320 15:42:44.046578 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f59db95fb-jt24h"] Mar 20 15:42:44 crc kubenswrapper[4813]: W0320 15:42:44.087246 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e6b525b_9ba3_4d8f_aaf7_178e75265b30.slice/crio-bb49632ca83643c2f01a9c0d49898948992da8f88854ef280991b875c96b8bf0 WatchSource:0}: Error finding container bb49632ca83643c2f01a9c0d49898948992da8f88854ef280991b875c96b8bf0: Status 404 returned error can't find the container with id bb49632ca83643c2f01a9c0d49898948992da8f88854ef280991b875c96b8bf0 Mar 20 15:42:44 crc kubenswrapper[4813]: I0320 15:42:44.100420 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" event={"ID":"2b602981-d371-41c8-a445-8cb07dc1553e","Type":"ContainerStarted","Data":"de931f7a3c8a8b576c53f29506a7cb71cfc799e283b23a31bbe9cb140a9f47e3"} Mar 20 15:42:44 crc kubenswrapper[4813]: I0320 15:42:44.103430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" event={"ID":"3e6b525b-9ba3-4d8f-aaf7-178e75265b30","Type":"ContainerStarted","Data":"bb49632ca83643c2f01a9c0d49898948992da8f88854ef280991b875c96b8bf0"} Mar 20 15:42:45 crc kubenswrapper[4813]: I0320 15:42:45.797036 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:42:45 crc kubenswrapper[4813]: I0320 15:42:45.797777 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:42:45 crc kubenswrapper[4813]: I0320 15:42:45.867598 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:42:46 crc kubenswrapper[4813]: I0320 15:42:46.177990 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:42:46 crc kubenswrapper[4813]: I0320 15:42:46.197614 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:42:46 crc kubenswrapper[4813]: I0320 15:42:46.197686 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:42:46 crc kubenswrapper[4813]: I0320 15:42:46.270224 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:42:47 crc kubenswrapper[4813]: I0320 15:42:47.129240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" event={"ID":"2b602981-d371-41c8-a445-8cb07dc1553e","Type":"ContainerStarted","Data":"c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1"} Mar 20 15:42:47 crc kubenswrapper[4813]: I0320 15:42:47.130647 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" event={"ID":"3e6b525b-9ba3-4d8f-aaf7-178e75265b30","Type":"ContainerStarted","Data":"7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8"} Mar 20 15:42:47 crc kubenswrapper[4813]: I0320 15:42:47.167344 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:42:48 crc kubenswrapper[4813]: I0320 15:42:48.161312 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" podStartSLOduration=7.161285026 podStartE2EDuration="7.161285026s" podCreationTimestamp="2026-03-20 15:42:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:48.15882813 +0000 UTC m=+297.581531071" watchObservedRunningTime="2026-03-20 15:42:48.161285026 +0000 UTC m=+297.583987907" Mar 20 15:42:48 crc kubenswrapper[4813]: I0320 15:42:48.190754 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" podStartSLOduration=7.190725605 podStartE2EDuration="7.190725605s" podCreationTimestamp="2026-03-20 15:42:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:42:48.183050252 +0000 UTC m=+297.605753153" watchObservedRunningTime="2026-03-20 15:42:48.190725605 +0000 UTC m=+297.613428476" Mar 20 15:42:48 crc kubenswrapper[4813]: I0320 15:42:48.199256 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rxzdv"] Mar 20 15:42:49 crc kubenswrapper[4813]: I0320 15:42:49.678765 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rxzdv" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="registry-server" containerID="cri-o://27979f5c493e980100999afa9bca3ec362f55ab8999cad5bcb913459812dd4cb" gracePeriod=2 Mar 20 15:42:50 crc kubenswrapper[4813]: I0320 15:42:50.687336 4813 generic.go:334] "Generic (PLEG): container finished" podID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerID="27979f5c493e980100999afa9bca3ec362f55ab8999cad5bcb913459812dd4cb" exitCode=0 Mar 20 15:42:50 crc kubenswrapper[4813]: I0320 15:42:50.687397 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerDied","Data":"27979f5c493e980100999afa9bca3ec362f55ab8999cad5bcb913459812dd4cb"} Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.008890 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.120548 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-utilities\") pod \"dafe7a17-1180-4e97-a6bc-309258d024b6\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.120650 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-catalog-content\") pod \"dafe7a17-1180-4e97-a6bc-309258d024b6\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.120693 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdmrv\" (UniqueName: \"kubernetes.io/projected/dafe7a17-1180-4e97-a6bc-309258d024b6-kube-api-access-cdmrv\") pod \"dafe7a17-1180-4e97-a6bc-309258d024b6\" (UID: \"dafe7a17-1180-4e97-a6bc-309258d024b6\") " Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.121313 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-utilities" (OuterVolumeSpecName: "utilities") pod "dafe7a17-1180-4e97-a6bc-309258d024b6" (UID: "dafe7a17-1180-4e97-a6bc-309258d024b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.126793 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dafe7a17-1180-4e97-a6bc-309258d024b6-kube-api-access-cdmrv" (OuterVolumeSpecName: "kube-api-access-cdmrv") pod "dafe7a17-1180-4e97-a6bc-309258d024b6" (UID: "dafe7a17-1180-4e97-a6bc-309258d024b6"). InnerVolumeSpecName "kube-api-access-cdmrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.222558 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.223307 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdmrv\" (UniqueName: \"kubernetes.io/projected/dafe7a17-1180-4e97-a6bc-309258d024b6-kube-api-access-cdmrv\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.241751 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dafe7a17-1180-4e97-a6bc-309258d024b6" (UID: "dafe7a17-1180-4e97-a6bc-309258d024b6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.325190 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dafe7a17-1180-4e97-a6bc-309258d024b6-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.694710 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rxzdv" event={"ID":"dafe7a17-1180-4e97-a6bc-309258d024b6","Type":"ContainerDied","Data":"aa870f66879f1ebab5792790b6f646b17a7d640065e0fb919d999a090e80ae7c"} Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.694794 4813 scope.go:117] "RemoveContainer" containerID="27979f5c493e980100999afa9bca3ec362f55ab8999cad5bcb913459812dd4cb" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.694919 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rxzdv" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.713330 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rxzdv"] Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.716398 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rxzdv"] Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.730118 4813 scope.go:117] "RemoveContainer" containerID="801ca882191d62f73f5652a6fbdbe21acdae302746fc657ab4a0a81a8fb0daa9" Mar 20 15:42:51 crc kubenswrapper[4813]: I0320 15:42:51.753659 4813 scope.go:117] "RemoveContainer" containerID="adaa775ad9f768fb147508cf639930fd0f92b3899750ab53d433ed7bec87cd95" Mar 20 15:42:52 crc kubenswrapper[4813]: I0320 15:42:52.688074 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:42:52 crc kubenswrapper[4813]: I0320 15:42:52.847385 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.033332 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.262237 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.272392 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" path="/var/lib/kubelet/pods/dafe7a17-1180-4e97-a6bc-309258d024b6/volumes" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.739875 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.743788 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.746217 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:53 crc kubenswrapper[4813]: I0320 15:42:53.750374 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:42:54 crc kubenswrapper[4813]: I0320 15:42:54.888507 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lgf4j"] Mar 20 15:42:56 crc kubenswrapper[4813]: I0320 15:42:56.787309 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4v5fd"] Mar 20 15:42:56 crc kubenswrapper[4813]: I0320 15:42:56.787704 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4v5fd" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="registry-server" containerID="cri-o://4c73b30703cdeae11d7cbf8e5128667ad84ba59384919f73dc2bd738ee5ce861" gracePeriod=2 Mar 20 15:42:56 crc kubenswrapper[4813]: I0320 15:42:56.986740 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qcm7m"] Mar 20 15:42:56 crc kubenswrapper[4813]: I0320 15:42:56.986969 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qcm7m" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="registry-server" containerID="cri-o://188e1ef66c7c337c50af03613a24b1b174fb2329d5352a64e741b772ba608ef6" gracePeriod=2 Mar 20 15:42:58 crc kubenswrapper[4813]: I0320 15:42:58.740423 4813 generic.go:334] "Generic (PLEG): container finished" podID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerID="4c73b30703cdeae11d7cbf8e5128667ad84ba59384919f73dc2bd738ee5ce861" exitCode=0 Mar 20 15:42:58 crc kubenswrapper[4813]: I0320 15:42:58.740503 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4v5fd" event={"ID":"ddf7608c-e3c8-40c2-a92e-98605cd5f35a","Type":"ContainerDied","Data":"4c73b30703cdeae11d7cbf8e5128667ad84ba59384919f73dc2bd738ee5ce861"} Mar 20 15:42:58 crc kubenswrapper[4813]: I0320 15:42:58.744158 4813 generic.go:334] "Generic (PLEG): container finished" podID="deacbab9-8fbd-4707-8098-5d748d77caae" containerID="188e1ef66c7c337c50af03613a24b1b174fb2329d5352a64e741b772ba608ef6" exitCode=0 Mar 20 15:42:58 crc kubenswrapper[4813]: I0320 15:42:58.744242 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcm7m" event={"ID":"deacbab9-8fbd-4707-8098-5d748d77caae","Type":"ContainerDied","Data":"188e1ef66c7c337c50af03613a24b1b174fb2329d5352a64e741b772ba608ef6"} Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.463228 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.528929 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.645777 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-catalog-content\") pod \"deacbab9-8fbd-4707-8098-5d748d77caae\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.645824 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qjfj\" (UniqueName: \"kubernetes.io/projected/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-kube-api-access-2qjfj\") pod \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.645861 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-catalog-content\") pod \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.645880 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjnhg\" (UniqueName: \"kubernetes.io/projected/deacbab9-8fbd-4707-8098-5d748d77caae-kube-api-access-bjnhg\") pod \"deacbab9-8fbd-4707-8098-5d748d77caae\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.645918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-utilities\") pod \"deacbab9-8fbd-4707-8098-5d748d77caae\" (UID: \"deacbab9-8fbd-4707-8098-5d748d77caae\") " Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.646775 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-utilities" (OuterVolumeSpecName: "utilities") pod "deacbab9-8fbd-4707-8098-5d748d77caae" (UID: "deacbab9-8fbd-4707-8098-5d748d77caae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.646815 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-utilities\") pod \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\" (UID: \"ddf7608c-e3c8-40c2-a92e-98605cd5f35a\") " Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.647110 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.647303 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-utilities" (OuterVolumeSpecName: "utilities") pod "ddf7608c-e3c8-40c2-a92e-98605cd5f35a" (UID: "ddf7608c-e3c8-40c2-a92e-98605cd5f35a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.652342 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-kube-api-access-2qjfj" (OuterVolumeSpecName: "kube-api-access-2qjfj") pod "ddf7608c-e3c8-40c2-a92e-98605cd5f35a" (UID: "ddf7608c-e3c8-40c2-a92e-98605cd5f35a"). InnerVolumeSpecName "kube-api-access-2qjfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.652366 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deacbab9-8fbd-4707-8098-5d748d77caae-kube-api-access-bjnhg" (OuterVolumeSpecName: "kube-api-access-bjnhg") pod "deacbab9-8fbd-4707-8098-5d748d77caae" (UID: "deacbab9-8fbd-4707-8098-5d748d77caae"). InnerVolumeSpecName "kube-api-access-bjnhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.697087 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "deacbab9-8fbd-4707-8098-5d748d77caae" (UID: "deacbab9-8fbd-4707-8098-5d748d77caae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.702229 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddf7608c-e3c8-40c2-a92e-98605cd5f35a" (UID: "ddf7608c-e3c8-40c2-a92e-98605cd5f35a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.752783 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.752827 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjnhg\" (UniqueName: \"kubernetes.io/projected/deacbab9-8fbd-4707-8098-5d748d77caae-kube-api-access-bjnhg\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.752838 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.752847 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/deacbab9-8fbd-4707-8098-5d748d77caae-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.752858 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qjfj\" (UniqueName: \"kubernetes.io/projected/ddf7608c-e3c8-40c2-a92e-98605cd5f35a-kube-api-access-2qjfj\") on node \"crc\" DevicePath \"\"" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.756201 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4v5fd" event={"ID":"ddf7608c-e3c8-40c2-a92e-98605cd5f35a","Type":"ContainerDied","Data":"a7dbe758427684484463248361d018c114c18602a06c9b81882555919afb6adc"} Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.756347 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4v5fd" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.759641 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qcm7m" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.760745 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcm7m" event={"ID":"deacbab9-8fbd-4707-8098-5d748d77caae","Type":"ContainerDied","Data":"b08c564b2e3c550b6ffa72ed30e06f579f11b473e1cfb4f710463fe107501e34"} Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.760797 4813 scope.go:117] "RemoveContainer" containerID="4c73b30703cdeae11d7cbf8e5128667ad84ba59384919f73dc2bd738ee5ce861" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.776740 4813 scope.go:117] "RemoveContainer" containerID="fffe450bf0afe561408acbf42de5f1e23a0c43f43aff7b4c61f0e0b87bd37e52" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.795129 4813 scope.go:117] "RemoveContainer" containerID="91712df359be7aaf64ca7613851142844c18ee10ce667d7d3065e01eb7ec901b" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.799569 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qcm7m"] Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.805101 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qcm7m"] Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.808907 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4v5fd"] Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.809219 4813 scope.go:117] "RemoveContainer" containerID="188e1ef66c7c337c50af03613a24b1b174fb2329d5352a64e741b772ba608ef6" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.812819 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4v5fd"] Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.822074 4813 scope.go:117] "RemoveContainer" containerID="6b23d32999057bc85298799baa3dda74e73cc4e699811b701fc53821d7bb57d2" Mar 20 15:42:59 crc kubenswrapper[4813]: I0320 15:42:59.834966 4813 scope.go:117] "RemoveContainer" containerID="c9f24b5a705901f9ae9fe4c5dcdb5b9fdf522853c7d18831f0fcb8f3dd42407f" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.045455 4813 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.046106 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967" gracePeriod=15 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.046167 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872" gracePeriod=15 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.046267 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b" gracePeriod=15 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.046337 4813 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117" gracePeriod=15 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.046395 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c" gracePeriod=15 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.051889 4813 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052441 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="extract-utilities" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052521 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="extract-utilities" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052551 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052567 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052603 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="extract-content" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052622 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="extract-content" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052651 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052669 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052687 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052702 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052726 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052742 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052792 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052810 4813 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052829 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="extract-content" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052845 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="extract-content" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052871 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052887 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052904 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052919 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052938 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="extract-utilities" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052954 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="extract-utilities" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.052978 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="extract-utilities" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.052993 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="extract-utilities" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053020 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053035 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053060 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053075 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053095 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053109 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053131 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Mar 20 
15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053148 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053168 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053238 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053257 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="extract-content" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053272 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="extract-content" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053514 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053547 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053572 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053589 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053607 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053625 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dafe7a17-1180-4e97-a6bc-309258d024b6" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053648 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053670 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053686 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" containerName="registry-server" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053706 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Mar 20 15:43:01 crc kubenswrapper[4813]: E0320 15:43:01.053905 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.053923 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.054101 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.054424 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.064754 4813 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.066074 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.073538 4813 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.173973 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174056 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174085 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174138 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174166 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174203 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174223 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.174257 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.274324 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddf7608c-e3c8-40c2-a92e-98605cd5f35a" path="/var/lib/kubelet/pods/ddf7608c-e3c8-40c2-a92e-98605cd5f35a/volumes" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275346 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275401 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275445 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275453 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275477 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275525 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275533 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275570 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275612 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275679 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deacbab9-8fbd-4707-8098-5d748d77caae" path="/var/lib/kubelet/pods/deacbab9-8fbd-4707-8098-5d748d77caae/volumes" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275692 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275700 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275719 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275758 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275782 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275851 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.275898 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.775015 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.777415 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.780460 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872" exitCode=0 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.780540 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b" exitCode=0 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.780571 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117" exitCode=0 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.780592 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c" exitCode=2 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.780616 4813 scope.go:117] "RemoveContainer" containerID="1a39e6bd7b6162ce7fd28a9b58b45e858313d51c8cad6f266e3d6e1dcdbe6c06" Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.792300 4813 generic.go:334] "Generic (PLEG): container finished" podID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" containerID="e145ec28df6cda1f111c2745fdfcd86c4d33d2fdcf2d6113a48957f7ef2830c5" exitCode=0 Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.792825 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e5b57c2d-92a5-479c-bcb3-5382a04091fc","Type":"ContainerDied","Data":"e145ec28df6cda1f111c2745fdfcd86c4d33d2fdcf2d6113a48957f7ef2830c5"} Mar 20 15:43:01 crc kubenswrapper[4813]: I0320 15:43:01.794317 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:02 crc kubenswrapper[4813]: I0320 15:43:02.804784 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.254860 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.257020 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.409926 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kubelet-dir\") pod \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.410061 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-var-lock\") pod \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.410140 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e5b57c2d-92a5-479c-bcb3-5382a04091fc" (UID: "e5b57c2d-92a5-479c-bcb3-5382a04091fc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.410163 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-var-lock" (OuterVolumeSpecName: "var-lock") pod "e5b57c2d-92a5-479c-bcb3-5382a04091fc" (UID: "e5b57c2d-92a5-479c-bcb3-5382a04091fc"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.410189 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kube-api-access\") pod \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\" (UID: \"e5b57c2d-92a5-479c-bcb3-5382a04091fc\") " Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.410692 4813 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-var-lock\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.410723 4813 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.418575 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.418729 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e5b57c2d-92a5-479c-bcb3-5382a04091fc" (UID: "e5b57c2d-92a5-479c-bcb3-5382a04091fc"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.419347 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.419859 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.420130 4813 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.511683 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b57c2d-92a5-479c-bcb3-5382a04091fc-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612202 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612255 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612306 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612309 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612401 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612571 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612712 4813 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612735 4813 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.612753 4813 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.816017 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.816814 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967" exitCode=0 Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.816931 4813 scope.go:117] "RemoveContainer" containerID="0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.817015 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.818558 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e5b57c2d-92a5-479c-bcb3-5382a04091fc","Type":"ContainerDied","Data":"b13883e861a95f933d0a9fba973bc6663c217ba7ae38025ed8c36e7f1ad59825"} Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.818606 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b13883e861a95f933d0a9fba973bc6663c217ba7ae38025ed8c36e7f1ad59825" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.818682 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.833873 4813 scope.go:117] "RemoveContainer" containerID="cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.843215 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.843336 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.843410 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.844331 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.844441 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2" gracePeriod=600 Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.845169 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.845818 4813 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.848529 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events/machine-config-daemon-l8d6t.189e96feafaa9a96\": dial tcp 38.102.83.181:6443: connect: connection refused" event=< Mar 20 15:43:03 crc kubenswrapper[4813]: &Event{ObjectMeta:{machine-config-daemon-l8d6t.189e96feafaa9a96 openshift-machine-config-operator 29571 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:machine-config-daemon-l8d6t,UID:dbc04883-b38a-4b6a-bee4-f6804c8aad94,APIVersion:v1,ResourceVersion:26846,FieldPath:spec.containers{machine-config-daemon},},Reason:ProbeError,Message:Liveness probe error: Get "http://127.0.0.1:8798/health": dial tcp 127.0.0.1:8798: connect: connection refused Mar 20 15:43:03 crc kubenswrapper[4813]: body: Mar 20 15:43:03 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:42:03 +0000 UTC,LastTimestamp:2026-03-20 15:43:03.843314105 +0000 UTC m=+313.266016976,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:43:03 crc kubenswrapper[4813]: > Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.849523 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.850114 4813 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.854306 4813 scope.go:117] "RemoveContainer" containerID="584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.857571 4813 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.858060 4813 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.858690 4813 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.859630 4813 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.860444 4813 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.860547 4813 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.861186 4813 controller.go:145] "Failed to ensure 
lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="200ms" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.876906 4813 scope.go:117] "RemoveContainer" containerID="d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.900384 4813 scope.go:117] "RemoveContainer" containerID="e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.921875 4813 scope.go:117] "RemoveContainer" containerID="2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.955606 4813 scope.go:117] "RemoveContainer" containerID="0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.956022 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\": container with ID starting with 0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872 not found: ID does not exist" containerID="0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.956076 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872"} err="failed to get container status \"0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\": rpc error: code = NotFound desc = could not find container \"0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872\": container with ID starting with 0169f703bbbe7e66d0d320b9754110c9939cc6f31b7b19f338988ccdd5340872 not found: ID does not exist" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.956111 4813 scope.go:117] "RemoveContainer" containerID="cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.956384 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\": container with ID starting with cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b not found: ID does not exist" containerID="cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.956420 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b"} err="failed to get container status \"cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\": rpc error: code = NotFound desc = could not find container \"cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b\": container with ID starting with cf16f455e094c0606c751a84ccfb3558ddf91a2c1eded39e239ef49759dd372b not found: ID does not exist" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.956444 4813 scope.go:117] "RemoveContainer" containerID="584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.956748 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\": container with ID starting with 584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117 not found: ID does not exist" containerID="584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.956786 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117"} err="failed to get container status \"584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\": rpc error: code = NotFound desc = could not find container \"584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117\": container with ID starting with 584eb15353f7c1fd36743b868cd988310f02e859d92ba568aeb0f8f355b7e117 not found: ID does not exist" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.956811 4813 scope.go:117] "RemoveContainer" containerID="d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.957056 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\": container with ID starting with d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c not found: ID does not exist" containerID="d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.957092 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c"} err="failed to get container status \"d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\": rpc error: code = NotFound desc = could not find container \"d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c\": container with ID starting with d4f6f9ae60822e1b82501b5a6d75675f0c8427f43da3527a67222764f7d9674c not found: ID does not exist" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.957114 4813 scope.go:117] "RemoveContainer" containerID="e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.957586 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\": container with ID starting with e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967 not found: ID does not exist" containerID="e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.957671 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967"} err="failed to get container status \"e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\": rpc error: code = NotFound desc = could not find container \"e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967\": container with ID starting with e6e24a0acba707822290186469cb332ea3963af47a5496dd1cc39b54d75a0967 not found: ID does not exist" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.957712 4813 scope.go:117] "RemoveContainer" 
containerID="2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3" Mar 20 15:43:03 crc kubenswrapper[4813]: E0320 15:43:03.958069 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\": container with ID starting with 2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3 not found: ID does not exist" containerID="2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3" Mar 20 15:43:03 crc kubenswrapper[4813]: I0320 15:43:03.958130 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3"} err="failed to get container status \"2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\": rpc error: code = NotFound desc = could not find container \"2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3\": container with ID starting with 2426d137e89aad0fa7bb7f22e7d8f3cf24c4340f037e2fbe58368e9db79c9da3 not found: ID does not exist" Mar 20 15:43:04 crc kubenswrapper[4813]: E0320 15:43:04.063000 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="400ms" Mar 20 15:43:04 crc kubenswrapper[4813]: E0320 15:43:04.464287 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="800ms" Mar 20 15:43:04 crc kubenswrapper[4813]: I0320 15:43:04.830003 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2" exitCode=0 Mar 20 15:43:04 crc kubenswrapper[4813]: I0320 15:43:04.830051 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2"} Mar 20 15:43:04 crc kubenswrapper[4813]: I0320 15:43:04.830430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"93237d0a8b83a96070f53085d50fd6e82323eb4f93a8d55d7840bf40381d06aa"} Mar 20 15:43:04 crc kubenswrapper[4813]: I0320 15:43:04.831235 4813 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:04 crc kubenswrapper[4813]: I0320 15:43:04.831671 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 
15:43:04 crc kubenswrapper[4813]: I0320 15:43:04.831929 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:05 crc kubenswrapper[4813]: E0320 15:43:05.265715 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="1.6s" Mar 20 15:43:05 crc kubenswrapper[4813]: I0320 15:43:05.275802 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Mar 20 15:43:06 crc kubenswrapper[4813]: E0320 15:43:06.115863 4813 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:06 crc kubenswrapper[4813]: I0320 15:43:06.116431 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:06 crc kubenswrapper[4813]: W0320 15:43:06.147642 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-b5e73710dc5631dac04bbd98bad666c540a8e41a202d0a581ae202d60e71d31a WatchSource:0}: Error finding container b5e73710dc5631dac04bbd98bad666c540a8e41a202d0a581ae202d60e71d31a: Status 404 returned error can't find the container with id b5e73710dc5631dac04bbd98bad666c540a8e41a202d0a581ae202d60e71d31a Mar 20 15:43:06 crc kubenswrapper[4813]: I0320 15:43:06.845939 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69"} Mar 20 15:43:06 crc kubenswrapper[4813]: I0320 15:43:06.846547 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"b5e73710dc5631dac04bbd98bad666c540a8e41a202d0a581ae202d60e71d31a"} Mar 20 15:43:06 crc kubenswrapper[4813]: E0320 15:43:06.847352 4813 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:06 crc kubenswrapper[4813]: I0320 15:43:06.847349 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:06 crc kubenswrapper[4813]: I0320 15:43:06.847964 4813 status_manager.go:851] 
"Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:06 crc kubenswrapper[4813]: E0320 15:43:06.866948 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="3.2s" Mar 20 15:43:10 crc kubenswrapper[4813]: E0320 15:43:10.068171 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="6.4s" Mar 20 15:43:11 crc kubenswrapper[4813]: I0320 15:43:11.270753 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:11 crc kubenswrapper[4813]: I0320 15:43:11.271239 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:13 crc kubenswrapper[4813]: E0320 15:43:13.074206 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/events/machine-config-daemon-l8d6t.189e96feafaa9a96\": dial tcp 38.102.83.181:6443: connect: connection refused" event=< Mar 20 15:43:13 crc kubenswrapper[4813]: &Event{ObjectMeta:{machine-config-daemon-l8d6t.189e96feafaa9a96 openshift-machine-config-operator 29571 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:machine-config-daemon-l8d6t,UID:dbc04883-b38a-4b6a-bee4-f6804c8aad94,APIVersion:v1,ResourceVersion:26846,FieldPath:spec.containers{machine-config-daemon},},Reason:ProbeError,Message:Liveness probe error: Get "http://127.0.0.1:8798/health": dial tcp 127.0.0.1:8798: connect: connection refused Mar 20 15:43:13 crc kubenswrapper[4813]: body: Mar 20 15:43:13 crc kubenswrapper[4813]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 15:42:03 +0000 UTC,LastTimestamp:2026-03-20 15:43:03.843314105 +0000 UTC m=+313.266016976,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 15:43:13 crc kubenswrapper[4813]: > Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.899573 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/1.log" Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.901829 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.901887 4813 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="86ccefb1bcc2d04371d91a518875328c145139e697219054fb3e9afb0f30716c" exitCode=1 Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.901920 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"86ccefb1bcc2d04371d91a518875328c145139e697219054fb3e9afb0f30716c"} Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.902504 4813 scope.go:117] "RemoveContainer" containerID="86ccefb1bcc2d04371d91a518875328c145139e697219054fb3e9afb0f30716c" Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.903061 4813 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.903612 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:14 crc kubenswrapper[4813]: I0320 15:43:14.904102 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:15 crc kubenswrapper[4813]: I0320 15:43:15.912759 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/1.log" Mar 20 15:43:15 crc kubenswrapper[4813]: I0320 15:43:15.915740 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Mar 20 15:43:15 crc kubenswrapper[4813]: I0320 15:43:15.915828 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c62e95459d4caa0d59839eb212a875ccae3590a15a58ff438ef819acfbf3f556"} Mar 20 15:43:15 crc kubenswrapper[4813]: I0320 15:43:15.917083 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:15 crc kubenswrapper[4813]: I0320 15:43:15.917620 4813 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:15 crc kubenswrapper[4813]: I0320 15:43:15.918065 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.265569 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.266434 4813 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.266840 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.267177 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.287521 4813 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.287856 4813 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:16 crc kubenswrapper[4813]: E0320 15:43:16.288229 4813 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.288700 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:16 crc kubenswrapper[4813]: W0320 15:43:16.312234 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-a878d2b016f0956bac99bc4b5b672e854bd0a9a6c6dac4b2834fbce50820c64a WatchSource:0}: Error finding container a878d2b016f0956bac99bc4b5b672e854bd0a9a6c6dac4b2834fbce50820c64a: Status 404 returned error can't find the container with id a878d2b016f0956bac99bc4b5b672e854bd0a9a6c6dac4b2834fbce50820c64a Mar 20 15:43:16 crc kubenswrapper[4813]: E0320 15:43:16.468949 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="7s" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.923879 4813 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="dd9048cb90d8675b14d6645a9904da1f5ac5f300034ef758155e9aabc5751e94" exitCode=0 Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.923929 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"dd9048cb90d8675b14d6645a9904da1f5ac5f300034ef758155e9aabc5751e94"} Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.923963 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a878d2b016f0956bac99bc4b5b672e854bd0a9a6c6dac4b2834fbce50820c64a"} Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.924310 4813 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.924329 4813 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:16 crc kubenswrapper[4813]: E0320 15:43:16.924831 4813 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.924832 4813 status_manager.go:851] "Failed to get status for pod" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.925464 4813 status_manager.go:851] "Failed to get status for pod" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-l8d6t\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:16 crc kubenswrapper[4813]: I0320 15:43:16.926051 4813 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.181:6443: connect: connection refused" Mar 20 15:43:17 crc kubenswrapper[4813]: I0320 15:43:17.930746 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2c93c622c400af03dc592816a755bcdd38d612506cee0f28158021e0f4614813"} Mar 20 15:43:17 crc kubenswrapper[4813]: I0320 15:43:17.931063 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fcbd3607ade37667bf55bfeded32596f8c2234208d9aa6d324632296c5981a3a"} Mar 20 15:43:17 crc kubenswrapper[4813]: I0320 15:43:17.931073 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"55f68754c193195d6287b3470070ebacd91608cb4736307ada93b27ab6f98997"} Mar 20 15:43:18 crc kubenswrapper[4813]: I0320 15:43:18.940109 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b5737f529bb6aa52530b7432239bd4e52c6b4b42dcd3508ef79562de74231e5a"} Mar 20 15:43:18 crc kubenswrapper[4813]: I0320 15:43:18.940326 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"100cbb608f08d1eeaa5812dc53200182656d73ddb573e2d44696079d353e6d79"} Mar 20 15:43:18 crc kubenswrapper[4813]: I0320 15:43:18.940373 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:18 crc kubenswrapper[4813]: I0320 15:43:18.940601 4813 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:18 crc kubenswrapper[4813]: I0320 15:43:18.940638 4813 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:19 crc kubenswrapper[4813]: I0320 15:43:19.216181 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:43:19 crc kubenswrapper[4813]: I0320 15:43:19.909100 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" podUID="56e4637c-fec1-435e-87db-4218601b4c45" containerName="oauth-openshift" containerID="cri-o://959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01" gracePeriod=15 Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.394381 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.462800 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mwpx\" (UniqueName: \"kubernetes.io/projected/56e4637c-fec1-435e-87db-4218601b4c45-kube-api-access-9mwpx\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.463117 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-serving-cert\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.463231 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-login\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.463356 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-service-ca\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.463973 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.463993 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.464195 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-cliconfig\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.464313 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-router-certs\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.464409 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-ocp-branding-template\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.464762 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56e4637c-fec1-435e-87db-4218601b4c45-audit-dir\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.464859 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56e4637c-fec1-435e-87db-4218601b4c45-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465011 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-error\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465108 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-audit-policies\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465199 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-trusted-ca-bundle\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465279 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-session\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465371 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-idp-0-file-data\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465468 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-provider-selection\") pod \"56e4637c-fec1-435e-87db-4218601b4c45\" (UID: \"56e4637c-fec1-435e-87db-4218601b4c45\") " Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.465703 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.466030 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.466244 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.466342 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.466438 4813 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56e4637c-fec1-435e-87db-4218601b4c45-audit-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.466547 4813 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-audit-policies\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.466643 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.468842 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.469034 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.469459 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.469608 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e4637c-fec1-435e-87db-4218601b4c45-kube-api-access-9mwpx" (OuterVolumeSpecName: "kube-api-access-9mwpx") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "kube-api-access-9mwpx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.469758 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.469881 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.470733 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.471452 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.471810 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "56e4637c-fec1-435e-87db-4218601b4c45" (UID: "56e4637c-fec1-435e-87db-4218601b4c45"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568172 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568234 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568262 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568285 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568406 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568423 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568437 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568450 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568463 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568499 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568519 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mwpx\" (UniqueName: \"kubernetes.io/projected/56e4637c-fec1-435e-87db-4218601b4c45-kube-api-access-9mwpx\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568534 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.568547 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56e4637c-fec1-435e-87db-4218601b4c45-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.571067 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.571230 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.571358 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.581685 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.581926 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.584381 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.594227 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.594225 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.807432 
4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.821663 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.833520 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.956334 4813 generic.go:334] "Generic (PLEG): container finished" podID="56e4637c-fec1-435e-87db-4218601b4c45" containerID="959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01" exitCode=0 Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.956373 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.956397 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" event={"ID":"56e4637c-fec1-435e-87db-4218601b4c45","Type":"ContainerDied","Data":"959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01"} Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.956838 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lgf4j" event={"ID":"56e4637c-fec1-435e-87db-4218601b4c45","Type":"ContainerDied","Data":"a5dabc0e88d57b1e3fc2a4eacdec2873fdaf9b3e30e2232b0d091998fa5b95d3"} Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.956870 4813 scope.go:117] "RemoveContainer" containerID="959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.981766 4813 scope.go:117] "RemoveContainer" containerID="959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01" Mar 20 15:43:20 crc kubenswrapper[4813]: E0320 15:43:20.982254 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01\": container with ID starting with 959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01 not found: ID does not exist" containerID="959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01" Mar 20 15:43:20 crc kubenswrapper[4813]: I0320 15:43:20.982290 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01"} err="failed to get container status \"959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01\": rpc error: code = NotFound desc = could not find container \"959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01\": container with ID starting with 959016d1cb78e9da7d692b63ab472c85456f8be84925def5e47416c9e521bc01 not found: ID does not exist" Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.289917 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.290184 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.296810 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:21 crc kubenswrapper[4813]: W0320 15:43:21.366041 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-cf9c1b49c8378c349fb79f99b470c84ea89d0832e652301ff035863f48c78786 WatchSource:0}: Error finding container cf9c1b49c8378c349fb79f99b470c84ea89d0832e652301ff035863f48c78786: Status 404 returned error can't find the container with id cf9c1b49c8378c349fb79f99b470c84ea89d0832e652301ff035863f48c78786 Mar 20 15:43:21 crc kubenswrapper[4813]: W0320 15:43:21.369518 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-d09bd132ffdeaac8bfac9ca2e221cad7edea57d065190e5bacc3a388f958314f WatchSource:0}: Error finding container d09bd132ffdeaac8bfac9ca2e221cad7edea57d065190e5bacc3a388f958314f: Status 404 returned error can't find the container with id d09bd132ffdeaac8bfac9ca2e221cad7edea57d065190e5bacc3a388f958314f Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.717678 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.722348 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.971396 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"060d1f34ae659fadacf89de9c3836356e42cd2f175d27ec67ebb3e726565ad66"} Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.971466 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"29d8d5e42a0088677ea56dae0cd3805baedbcc2cefc7e8e7c3374a7d8594f7d8"} Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.975161 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8b88ad941536fc89f9551f26f130c05ff4447f5c47b249753bc5a1dad5e41183"} Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.975221 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d09bd132ffdeaac8bfac9ca2e221cad7edea57d065190e5bacc3a388f958314f"} Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.981697 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"9c658ccf9ec3b0435960aef4bc55771057570bda1f320aecc048c6f5e32b1122"} Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.981759 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"cf9c1b49c8378c349fb79f99b470c84ea89d0832e652301ff035863f48c78786"} Mar 20 15:43:21 crc kubenswrapper[4813]: I0320 15:43:21.982241 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:43:22 crc kubenswrapper[4813]: I0320 15:43:22.989948 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Mar 20 15:43:22 crc kubenswrapper[4813]: I0320 15:43:22.990299 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="8b88ad941536fc89f9551f26f130c05ff4447f5c47b249753bc5a1dad5e41183" exitCode=255 Mar 20 15:43:22 crc kubenswrapper[4813]: I0320 15:43:22.990365 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"8b88ad941536fc89f9551f26f130c05ff4447f5c47b249753bc5a1dad5e41183"} Mar 20 15:43:22 crc kubenswrapper[4813]: I0320 15:43:22.990791 4813 scope.go:117] "RemoveContainer" containerID="8b88ad941536fc89f9551f26f130c05ff4447f5c47b249753bc5a1dad5e41183" Mar 20 15:43:23 crc kubenswrapper[4813]: I0320 15:43:23.949539 4813 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:23 crc kubenswrapper[4813]: I0320 15:43:23.997864 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Mar 20 15:43:23 crc kubenswrapper[4813]: I0320 15:43:23.997962 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c69797f22bcdd5efd9fd4bb87e73f9eb55973b0a02b475fa240a55dee17c818b"} Mar 20 15:43:24 crc kubenswrapper[4813]: I0320 15:43:24.005233 4813 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:24 crc kubenswrapper[4813]: I0320 15:43:24.005271 4813 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:24 crc kubenswrapper[4813]: I0320 15:43:24.010626 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:24 crc kubenswrapper[4813]: I0320 15:43:24.028369 4813 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c80a1e1a-c888-433f-9f4c-8b3d12ef1a46" Mar 20 15:43:24 crc kubenswrapper[4813]: E0320 15:43:24.374051 4813 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Mar 20 15:43:24 crc kubenswrapper[4813]: E0320 15:43:24.794166 4813 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-router-certs\": Failed to watch *v1.Secret: unknown (get secrets)" logger="UnhandledError" Mar 20 
15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.006849 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.007670 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/0.log" Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.007755 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="c69797f22bcdd5efd9fd4bb87e73f9eb55973b0a02b475fa240a55dee17c818b" exitCode=255 Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.007822 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"c69797f22bcdd5efd9fd4bb87e73f9eb55973b0a02b475fa240a55dee17c818b"} Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.007915 4813 scope.go:117] "RemoveContainer" containerID="8b88ad941536fc89f9551f26f130c05ff4447f5c47b249753bc5a1dad5e41183" Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.008263 4813 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.008305 4813 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4645d0fa-057e-498d-8b10-6897ac843624" Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.008497 4813 scope.go:117] "RemoveContainer" containerID="c69797f22bcdd5efd9fd4bb87e73f9eb55973b0a02b475fa240a55dee17c818b" Mar 20 15:43:25 crc kubenswrapper[4813]: E0320 15:43:25.008819 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:43:25 crc kubenswrapper[4813]: I0320 15:43:25.013706 4813 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c80a1e1a-c888-433f-9f4c-8b3d12ef1a46" Mar 20 15:43:26 crc kubenswrapper[4813]: I0320 15:43:26.016535 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Mar 20 15:43:29 crc kubenswrapper[4813]: I0320 15:43:29.224786 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 15:43:33 crc kubenswrapper[4813]: I0320 15:43:33.418348 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Mar 20 15:43:33 crc kubenswrapper[4813]: I0320 15:43:33.793868 4813 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Mar 20 15:43:34 crc kubenswrapper[4813]: I0320 15:43:34.120248 4813 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Mar 20 15:43:34 crc kubenswrapper[4813]: I0320 15:43:34.210441 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Mar 20 15:43:34 crc kubenswrapper[4813]: I0320 15:43:34.398224 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Mar 20 15:43:34 crc kubenswrapper[4813]: I0320 15:43:34.548182 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Mar 20 15:43:34 crc kubenswrapper[4813]: I0320 15:43:34.685548 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.029599 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.052617 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.129768 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.177227 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.205734 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.228625 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.266289 4813 scope.go:117] "RemoveContainer" containerID="c69797f22bcdd5efd9fd4bb87e73f9eb55973b0a02b475fa240a55dee17c818b" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.344529 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.574869 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Mar 20 15:43:35 crc kubenswrapper[4813]: I0320 15:43:35.950018 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.081291 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.081344 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8d3581cae8ef9e9fc8417ba6d1560b225aa37013be9a73892592e06fa02e26ed"} Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.142625 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Mar 20 15:43:36 crc 
kubenswrapper[4813]: I0320 15:43:36.285348 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.288778 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.408916 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.415107 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.415650 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.469106 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.610107 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.779040 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.958520 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Mar 20 15:43:36 crc kubenswrapper[4813]: I0320 15:43:36.999027 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.005283 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.007931 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.089221 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.089960 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/1.log" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.090021 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d751cbb-f2e2-430d-9754-c882a5e924a5" containerID="8d3581cae8ef9e9fc8417ba6d1560b225aa37013be9a73892592e06fa02e26ed" exitCode=255 Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.090058 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerDied","Data":"8d3581cae8ef9e9fc8417ba6d1560b225aa37013be9a73892592e06fa02e26ed"} Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.090100 4813 scope.go:117] "RemoveContainer" containerID="c69797f22bcdd5efd9fd4bb87e73f9eb55973b0a02b475fa240a55dee17c818b" Mar 20 15:43:37 crc 
kubenswrapper[4813]: I0320 15:43:37.090604 4813 scope.go:117] "RemoveContainer" containerID="8d3581cae8ef9e9fc8417ba6d1560b225aa37013be9a73892592e06fa02e26ed" Mar 20 15:43:37 crc kubenswrapper[4813]: E0320 15:43:37.090832 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.141686 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.179310 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.184409 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.209450 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.361019 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.384373 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.460407 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.488831 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.519004 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.536945 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.560179 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.606437 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.873407 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Mar 20 15:43:37 crc kubenswrapper[4813]: I0320 15:43:37.933799 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.022340 4813 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.035044 4813 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"machine-approver-config" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.096742 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.097189 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.122767 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.129586 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.145530 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.165555 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.185422 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.201707 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.303200 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.349642 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.485886 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.511816 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.534111 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.738052 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.832535 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.857582 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Mar 20 15:43:38 crc kubenswrapper[4813]: I0320 15:43:38.866436 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.021319 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.039319 4813 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.082028 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.223423 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.367549 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.439813 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.494990 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.546842 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.589341 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.644573 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.690047 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Mar 20 15:43:39 crc kubenswrapper[4813]: I0320 15:43:39.743645 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.097083 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.172440 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.190395 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.193406 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.193510 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.236141 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.248308 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.396619 4813 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.401520 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-lgf4j"] Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.401588 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.406267 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.439836 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=17.439816892 podStartE2EDuration="17.439816892s" podCreationTimestamp="2026-03-20 15:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:43:40.433257197 +0000 UTC m=+349.855960038" watchObservedRunningTime="2026-03-20 15:43:40.439816892 +0000 UTC m=+349.862519753" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.470854 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.471079 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.551312 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.648616 4813 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.931700 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.964405 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.977503 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Mar 20 15:43:40 crc kubenswrapper[4813]: I0320 15:43:40.991869 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.080130 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.138574 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.213327 4813 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.271373 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.277990 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e4637c-fec1-435e-87db-4218601b4c45" 
path="/var/lib/kubelet/pods/56e4637c-fec1-435e-87db-4218601b4c45/volumes" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.293903 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.340289 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.369740 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.381732 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.425858 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.593780 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.620626 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.627622 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.707051 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.802197 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.820774 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.857369 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Mar 20 15:43:41 crc kubenswrapper[4813]: I0320 15:43:41.966832 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.263902 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.273193 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.414961 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.419186 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.676678 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.685601 4813 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.693040 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.717255 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.726397 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.919968 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.944547 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Mar 20 15:43:42 crc kubenswrapper[4813]: I0320 15:43:42.960291 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.283000 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.302822 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.305946 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.394176 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.473551 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.504707 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.565850 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.628663 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.645544 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.670328 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.764909 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.819277 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.849905 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Mar 20 15:43:43 
crc kubenswrapper[4813]: I0320 15:43:43.955064 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Mar 20 15:43:43 crc kubenswrapper[4813]: I0320 15:43:43.975244 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.061917 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.067594 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.150849 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.207025 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.209171 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.307857 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.345382 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.400607 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.624037 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.652266 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.680646 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.720001 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.840666 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.847072 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.849328 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.861050 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.874706 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Mar 20 
15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.877846 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.937750 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Mar 20 15:43:44 crc kubenswrapper[4813]: I0320 15:43:44.965138 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.045758 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.129463 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.223476 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.239083 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.334921 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.341114 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.394711 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.439781 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.487675 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.572032 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.594319 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.625046 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.661829 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.693526 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.726536 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.738684 4813 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.757143 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.800431 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.943102 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Mar 20 15:43:45 crc kubenswrapper[4813]: I0320 15:43:45.947027 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.036652 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.043279 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.080080 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.141624 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.203620 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.208194 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.249262 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.253908 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.330798 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.365669 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.383692 4813 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.383984 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69" gracePeriod=5 Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.388966 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 
15:43:46.480903 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.517575 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.524415 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.544743 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.709311 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.736103 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.754506 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.875999 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Mar 20 15:43:46 crc kubenswrapper[4813]: I0320 15:43:46.955934 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.041269 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.117258 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.172998 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.179063 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.205052 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.229353 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.238714 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.240436 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.274738 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.306258 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Mar 
20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.340559 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.518183 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.579557 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.639211 4813 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.639222 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Mar 20 15:43:47 crc kubenswrapper[4813]: I0320 15:43:47.693928 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.033693 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.125554 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.150755 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.155727 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.291338 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.353212 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.456441 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.546331 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.699833 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.722524 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.728558 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-78779c56d4-vqtcw"] Mar 20 15:43:48 crc kubenswrapper[4813]: E0320 15:43:48.728913 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e4637c-fec1-435e-87db-4218601b4c45" containerName="oauth-openshift" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.728946 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e4637c-fec1-435e-87db-4218601b4c45" containerName="oauth-openshift" Mar 20 15:43:48 
crc kubenswrapper[4813]: E0320 15:43:48.728980 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" containerName="installer" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.728996 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" containerName="installer" Mar 20 15:43:48 crc kubenswrapper[4813]: E0320 15:43:48.729024 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.729041 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.729277 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b57c2d-92a5-479c-bcb3-5382a04091fc" containerName="installer" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.729306 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.729331 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e4637c-fec1-435e-87db-4218601b4c45" containerName="oauth-openshift" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.730107 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.739805 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.740118 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.745154 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.747829 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.747930 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.747860 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.748389 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.748832 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.749180 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.749295 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.752858 4813 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.753245 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.754328 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.759351 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.759869 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-78779c56d4-vqtcw"] Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.763109 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.830751 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-service-ca\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.831113 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.831298 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgbh8\" (UniqueName: \"kubernetes.io/projected/bf5fbd2d-39bf-4642-9ada-14cf7e141542-kube-api-access-hgbh8\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.831537 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-cliconfig\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.831693 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-login\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.831958 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-router-certs\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.832167 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-serving-cert\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.832353 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-error\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.832552 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-session\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.832752 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf5fbd2d-39bf-4642-9ada-14cf7e141542-audit-dir\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.832937 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.833120 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-audit-policies\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.833275 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.833445 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.859928 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.873781 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.934939 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf5fbd2d-39bf-4642-9ada-14cf7e141542-audit-dir\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.934995 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935036 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-audit-policies\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935059 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935080 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935104 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-service-ca\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935127 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935150 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgbh8\" (UniqueName: \"kubernetes.io/projected/bf5fbd2d-39bf-4642-9ada-14cf7e141542-kube-api-access-hgbh8\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935174 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-cliconfig\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935193 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-login\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935215 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-router-certs\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935279 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-serving-cert\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935314 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-error\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.935354 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-session\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.936046 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/bf5fbd2d-39bf-4642-9ada-14cf7e141542-audit-dir\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.936668 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-audit-policies\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.937260 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-service-ca\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.937600 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-cliconfig\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.938383 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.941842 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-session\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.942650 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.942770 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.942906 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-serving-cert\") 
pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.943257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.952821 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-system-router-certs\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.955154 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgbh8\" (UniqueName: \"kubernetes.io/projected/bf5fbd2d-39bf-4642-9ada-14cf7e141542-kube-api-access-hgbh8\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.957002 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-login\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:48 crc kubenswrapper[4813]: I0320 15:43:48.958359 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bf5fbd2d-39bf-4642-9ada-14cf7e141542-v4-0-config-user-template-error\") pod \"oauth-openshift-78779c56d4-vqtcw\" (UID: \"bf5fbd2d-39bf-4642-9ada-14cf7e141542\") " pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.050405 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.132598 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.190528 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.265880 4813 scope.go:117] "RemoveContainer" containerID="8d3581cae8ef9e9fc8417ba6d1560b225aa37013be9a73892592e06fa02e26ed" Mar 20 15:43:49 crc kubenswrapper[4813]: E0320 15:43:49.266328 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=check-endpoints pod=network-check-source-55646444c4-trplf_openshift-network-diagnostics(9d751cbb-f2e2-430d-9754-c882a5e924a5)\"" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.301837 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.491565 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-78779c56d4-vqtcw"] Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.784944 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Mar 20 15:43:49 crc kubenswrapper[4813]: I0320 15:43:49.828551 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.055164 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.171300 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" event={"ID":"bf5fbd2d-39bf-4642-9ada-14cf7e141542","Type":"ContainerStarted","Data":"f64b55067ec5be20bffb6ab037b7e53eae7ce22303f7b84f517f8bd9f757380b"} Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.171360 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" event={"ID":"bf5fbd2d-39bf-4642-9ada-14cf7e141542","Type":"ContainerStarted","Data":"66433d9bf6b26e6e44357550b8c6cb83604e3d1ce3273637d0a8e5ea376f9495"} Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.172924 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.188011 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.209098 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" podStartSLOduration=56.209082375 podStartE2EDuration="56.209082375s" podCreationTimestamp="2026-03-20 15:42:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 15:43:50.206948618 +0000 UTC m=+359.629651459" watchObservedRunningTime="2026-03-20 15:43:50.209082375 +0000 UTC m=+359.631785216" Mar 20 15:43:50 crc kubenswrapper[4813]: I0320 15:43:50.454044 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-78779c56d4-vqtcw" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.509237 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.509346 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667399 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667507 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667550 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667573 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667603 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667625 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667657 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667733 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.667765 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.668066 4813 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.668084 4813 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.668095 4813 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.668106 4813 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.681287 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:43:51 crc kubenswrapper[4813]: I0320 15:43:51.769329 4813 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Mar 20 15:43:52 crc kubenswrapper[4813]: I0320 15:43:52.188296 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Mar 20 15:43:52 crc kubenswrapper[4813]: I0320 15:43:52.188394 4813 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69" exitCode=137 Mar 20 15:43:52 crc kubenswrapper[4813]: I0320 15:43:52.188530 4813 scope.go:117] "RemoveContainer" containerID="6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69" Mar 20 15:43:52 crc kubenswrapper[4813]: I0320 15:43:52.188534 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 15:43:52 crc kubenswrapper[4813]: I0320 15:43:52.215786 4813 scope.go:117] "RemoveContainer" containerID="6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69" Mar 20 15:43:52 crc kubenswrapper[4813]: E0320 15:43:52.216388 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69\": container with ID starting with 6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69 not found: ID does not exist" containerID="6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69" Mar 20 15:43:52 crc kubenswrapper[4813]: I0320 15:43:52.216442 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69"} err="failed to get container status \"6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69\": rpc error: code = NotFound desc = could not find container \"6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69\": container with ID starting with 6532ef1f987401df50aef7739cef1f571569b18c3f8cceb9485d07e5db24bb69 not found: ID does not exist" Mar 20 15:43:53 crc kubenswrapper[4813]: I0320 15:43:53.278736 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.176513 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567024-hvhw8"] Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.178077 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.180106 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.185539 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.185816 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.190835 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567024-hvhw8"] Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.265566 4813 scope.go:117] "RemoveContainer" containerID="8d3581cae8ef9e9fc8417ba6d1560b225aa37013be9a73892592e06fa02e26ed" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.288079 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4298\" (UniqueName: \"kubernetes.io/projected/992ec145-8ff5-49a3-aa0f-6bb554b83c80-kube-api-access-b4298\") pod \"auto-csr-approver-29567024-hvhw8\" (UID: \"992ec145-8ff5-49a3-aa0f-6bb554b83c80\") " pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.389900 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4298\" (UniqueName: \"kubernetes.io/projected/992ec145-8ff5-49a3-aa0f-6bb554b83c80-kube-api-access-b4298\") pod \"auto-csr-approver-29567024-hvhw8\" (UID: \"992ec145-8ff5-49a3-aa0f-6bb554b83c80\") " pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.410381 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4298\" (UniqueName: \"kubernetes.io/projected/992ec145-8ff5-49a3-aa0f-6bb554b83c80-kube-api-access-b4298\") pod \"auto-csr-approver-29567024-hvhw8\" (UID: \"992ec145-8ff5-49a3-aa0f-6bb554b83c80\") " pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.494782 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.840863 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 15:44:00 crc kubenswrapper[4813]: I0320 15:44:00.940138 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567024-hvhw8"] Mar 20 15:44:00 crc kubenswrapper[4813]: W0320 15:44:00.946182 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod992ec145_8ff5_49a3_aa0f_6bb554b83c80.slice/crio-346db02fc2368138547aac577137ddec5de90bf9c126a6d7e547245d2b46ee0c WatchSource:0}: Error finding container 346db02fc2368138547aac577137ddec5de90bf9c126a6d7e547245d2b46ee0c: Status 404 returned error can't find the container with id 346db02fc2368138547aac577137ddec5de90bf9c126a6d7e547245d2b46ee0c Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.246624 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-network-diagnostics_network-check-source-55646444c4-trplf_9d751cbb-f2e2-430d-9754-c882a5e924a5/check-endpoints/2.log" Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.246716 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7c0750cecc24617200a60eb715b9b1653c6da153b17c7c77079af24269dd42dc"} Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.247730 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" event={"ID":"992ec145-8ff5-49a3-aa0f-6bb554b83c80","Type":"ContainerStarted","Data":"346db02fc2368138547aac577137ddec5de90bf9c126a6d7e547245d2b46ee0c"} Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.712841 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f59db95fb-jt24h"] Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.713604 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" podUID="3e6b525b-9ba3-4d8f-aaf7-178e75265b30" containerName="controller-manager" containerID="cri-o://7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8" gracePeriod=30 Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.798581 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl"] Mar 20 15:44:01 crc kubenswrapper[4813]: I0320 15:44:01.798827 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" podUID="2b602981-d371-41c8-a445-8cb07dc1553e" containerName="route-controller-manager" containerID="cri-o://c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1" gracePeriod=30 Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.084322 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.136540 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.210780 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-serving-cert\") pod \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.210983 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-client-ca\") pod \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.211067 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ks2q\" (UniqueName: \"kubernetes.io/projected/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-kube-api-access-5ks2q\") pod \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.211126 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-proxy-ca-bundles\") pod \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.211187 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-config\") pod \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\" (UID: \"3e6b525b-9ba3-4d8f-aaf7-178e75265b30\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.212342 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3e6b525b-9ba3-4d8f-aaf7-178e75265b30" (UID: "3e6b525b-9ba3-4d8f-aaf7-178e75265b30"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.212371 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-config" (OuterVolumeSpecName: "config") pod "3e6b525b-9ba3-4d8f-aaf7-178e75265b30" (UID: "3e6b525b-9ba3-4d8f-aaf7-178e75265b30"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.212672 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-client-ca" (OuterVolumeSpecName: "client-ca") pod "3e6b525b-9ba3-4d8f-aaf7-178e75265b30" (UID: "3e6b525b-9ba3-4d8f-aaf7-178e75265b30"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.213745 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.213802 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.213829 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.216552 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3e6b525b-9ba3-4d8f-aaf7-178e75265b30" (UID: "3e6b525b-9ba3-4d8f-aaf7-178e75265b30"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.216590 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-kube-api-access-5ks2q" (OuterVolumeSpecName: "kube-api-access-5ks2q") pod "3e6b525b-9ba3-4d8f-aaf7-178e75265b30" (UID: "3e6b525b-9ba3-4d8f-aaf7-178e75265b30"). InnerVolumeSpecName "kube-api-access-5ks2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.254589 4813 generic.go:334] "Generic (PLEG): container finished" podID="3e6b525b-9ba3-4d8f-aaf7-178e75265b30" containerID="7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8" exitCode=0 Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.254676 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" event={"ID":"3e6b525b-9ba3-4d8f-aaf7-178e75265b30","Type":"ContainerDied","Data":"7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8"} Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.254709 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" event={"ID":"3e6b525b-9ba3-4d8f-aaf7-178e75265b30","Type":"ContainerDied","Data":"bb49632ca83643c2f01a9c0d49898948992da8f88854ef280991b875c96b8bf0"} Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.254728 4813 scope.go:117] "RemoveContainer" containerID="7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.254721 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f59db95fb-jt24h" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.258426 4813 generic.go:334] "Generic (PLEG): container finished" podID="2b602981-d371-41c8-a445-8cb07dc1553e" containerID="c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1" exitCode=0 Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.258476 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.258471 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" event={"ID":"2b602981-d371-41c8-a445-8cb07dc1553e","Type":"ContainerDied","Data":"c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1"} Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.258639 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl" event={"ID":"2b602981-d371-41c8-a445-8cb07dc1553e","Type":"ContainerDied","Data":"de931f7a3c8a8b576c53f29506a7cb71cfc799e283b23a31bbe9cb140a9f47e3"} Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.279209 4813 scope.go:117] "RemoveContainer" containerID="7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8" Mar 20 15:44:02 crc kubenswrapper[4813]: E0320 15:44:02.280057 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8\": container with ID starting with 7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8 not found: ID does not exist" containerID="7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.280194 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8"} err="failed to get container status \"7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8\": rpc error: code = NotFound desc = could not find container \"7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8\": container with ID starting with 7e191cccf7574ab80583fc1a407f9ca64a2bbf202df6a90cfcdc6940378eb7d8 not found: ID does not exist" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.280337 4813 scope.go:117] "RemoveContainer" containerID="c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.294085 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f59db95fb-jt24h"] Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.298611 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7f59db95fb-jt24h"] Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.303599 4813 scope.go:117] "RemoveContainer" containerID="c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1" Mar 20 15:44:02 crc kubenswrapper[4813]: E0320 15:44:02.304231 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1\": container with ID starting with c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1 not found: ID does not exist" containerID="c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.304276 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1"} err="failed to get container status 
\"c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1\": rpc error: code = NotFound desc = could not find container \"c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1\": container with ID starting with c70334684998923e58ad61db8739cf3103bf1041f05e40f3d26fe7656550edd1 not found: ID does not exist" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.314977 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvj6c\" (UniqueName: \"kubernetes.io/projected/2b602981-d371-41c8-a445-8cb07dc1553e-kube-api-access-kvj6c\") pod \"2b602981-d371-41c8-a445-8cb07dc1553e\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.315147 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-client-ca\") pod \"2b602981-d371-41c8-a445-8cb07dc1553e\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.315219 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b602981-d371-41c8-a445-8cb07dc1553e-serving-cert\") pod \"2b602981-d371-41c8-a445-8cb07dc1553e\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.315325 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-config\") pod \"2b602981-d371-41c8-a445-8cb07dc1553e\" (UID: \"2b602981-d371-41c8-a445-8cb07dc1553e\") " Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.315777 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.315831 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ks2q\" (UniqueName: \"kubernetes.io/projected/3e6b525b-9ba3-4d8f-aaf7-178e75265b30-kube-api-access-5ks2q\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.316447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-config" (OuterVolumeSpecName: "config") pod "2b602981-d371-41c8-a445-8cb07dc1553e" (UID: "2b602981-d371-41c8-a445-8cb07dc1553e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.316595 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-client-ca" (OuterVolumeSpecName: "client-ca") pod "2b602981-d371-41c8-a445-8cb07dc1553e" (UID: "2b602981-d371-41c8-a445-8cb07dc1553e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.318739 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b602981-d371-41c8-a445-8cb07dc1553e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2b602981-d371-41c8-a445-8cb07dc1553e" (UID: "2b602981-d371-41c8-a445-8cb07dc1553e"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.319715 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b602981-d371-41c8-a445-8cb07dc1553e-kube-api-access-kvj6c" (OuterVolumeSpecName: "kube-api-access-kvj6c") pod "2b602981-d371-41c8-a445-8cb07dc1553e" (UID: "2b602981-d371-41c8-a445-8cb07dc1553e"). InnerVolumeSpecName "kube-api-access-kvj6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.416786 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.416825 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b602981-d371-41c8-a445-8cb07dc1553e-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.416836 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b602981-d371-41c8-a445-8cb07dc1553e-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.416846 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvj6c\" (UniqueName: \"kubernetes.io/projected/2b602981-d371-41c8-a445-8cb07dc1553e-kube-api-access-kvj6c\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.597891 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl"] Mar 20 15:44:02 crc kubenswrapper[4813]: I0320 15:44:02.606760 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-744645547b-cgfcl"] Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.272196 4813 generic.go:334] "Generic (PLEG): container finished" podID="992ec145-8ff5-49a3-aa0f-6bb554b83c80" containerID="543fc3c3421acd945590cc546ed1fc59833d02a7f8dd38550b774288e9f8e904" exitCode=0 Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.278858 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b602981-d371-41c8-a445-8cb07dc1553e" path="/var/lib/kubelet/pods/2b602981-d371-41c8-a445-8cb07dc1553e/volumes" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.279826 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e6b525b-9ba3-4d8f-aaf7-178e75265b30" path="/var/lib/kubelet/pods/3e6b525b-9ba3-4d8f-aaf7-178e75265b30/volumes" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.280395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" event={"ID":"992ec145-8ff5-49a3-aa0f-6bb554b83c80","Type":"ContainerDied","Data":"543fc3c3421acd945590cc546ed1fc59833d02a7f8dd38550b774288e9f8e904"} Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.467350 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.734645 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg"] Mar 20 15:44:03 crc kubenswrapper[4813]: E0320 15:44:03.734987 4813 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="2b602981-d371-41c8-a445-8cb07dc1553e" containerName="route-controller-manager" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.735007 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b602981-d371-41c8-a445-8cb07dc1553e" containerName="route-controller-manager" Mar 20 15:44:03 crc kubenswrapper[4813]: E0320 15:44:03.735031 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6b525b-9ba3-4d8f-aaf7-178e75265b30" containerName="controller-manager" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.735046 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6b525b-9ba3-4d8f-aaf7-178e75265b30" containerName="controller-manager" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.735245 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6b525b-9ba3-4d8f-aaf7-178e75265b30" containerName="controller-manager" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.735273 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b602981-d371-41c8-a445-8cb07dc1553e" containerName="route-controller-manager" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.735906 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.739424 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.741563 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.741925 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.741943 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.742178 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.742685 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.745722 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-2crxf"] Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.746517 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.748566 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.748795 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.749037 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.750683 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.750883 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.751201 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.755616 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg"] Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.759047 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.759968 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-2crxf"] Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.836077 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-client-ca\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.836439 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/661bbfc8-f9c7-4991-9432-c7c9f5726a15-serving-cert\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.836475 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-config\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.836562 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6nr7\" (UniqueName: \"kubernetes.io/projected/661bbfc8-f9c7-4991-9432-c7c9f5726a15-kube-api-access-b6nr7\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " 
pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938281 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8191d79b-d424-47ad-823b-a8bb7d9dd351-serving-cert\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938521 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6nr7\" (UniqueName: \"kubernetes.io/projected/661bbfc8-f9c7-4991-9432-c7c9f5726a15-kube-api-access-b6nr7\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938634 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-proxy-ca-bundles\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938684 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28xp8\" (UniqueName: \"kubernetes.io/projected/8191d79b-d424-47ad-823b-a8bb7d9dd351-kube-api-access-28xp8\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938807 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-client-ca\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938874 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-client-ca\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938918 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-config\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.938967 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/661bbfc8-f9c7-4991-9432-c7c9f5726a15-serving-cert\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " 
pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.939051 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-config\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.941137 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-client-ca\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.942902 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-config\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.948253 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/661bbfc8-f9c7-4991-9432-c7c9f5726a15-serving-cert\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:03 crc kubenswrapper[4813]: I0320 15:44:03.970413 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6nr7\" (UniqueName: \"kubernetes.io/projected/661bbfc8-f9c7-4991-9432-c7c9f5726a15-kube-api-access-b6nr7\") pod \"route-controller-manager-6f8d66c474-hb7jg\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.040854 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-client-ca\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.040960 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-config\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.041071 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8191d79b-d424-47ad-823b-a8bb7d9dd351-serving-cert\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.041157 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-proxy-ca-bundles\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.041202 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28xp8\" (UniqueName: \"kubernetes.io/projected/8191d79b-d424-47ad-823b-a8bb7d9dd351-kube-api-access-28xp8\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.042944 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-client-ca\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.045194 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-config\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.047329 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-proxy-ca-bundles\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.056430 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8191d79b-d424-47ad-823b-a8bb7d9dd351-serving-cert\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.061363 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.064517 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28xp8\" (UniqueName: \"kubernetes.io/projected/8191d79b-d424-47ad-823b-a8bb7d9dd351-kube-api-access-28xp8\") pod \"controller-manager-59d77b64d6-2crxf\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.073589 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.340152 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-2crxf"] Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.492358 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg"] Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.496393 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:04 crc kubenswrapper[4813]: W0320 15:44:04.498992 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod661bbfc8_f9c7_4991_9432_c7c9f5726a15.slice/crio-207a0146074b2084f41ffa4da23b449ce31bc7b77157e22cd4d4fce63a9d9849 WatchSource:0}: Error finding container 207a0146074b2084f41ffa4da23b449ce31bc7b77157e22cd4d4fce63a9d9849: Status 404 returned error can't find the container with id 207a0146074b2084f41ffa4da23b449ce31bc7b77157e22cd4d4fce63a9d9849 Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.647625 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4298\" (UniqueName: \"kubernetes.io/projected/992ec145-8ff5-49a3-aa0f-6bb554b83c80-kube-api-access-b4298\") pod \"992ec145-8ff5-49a3-aa0f-6bb554b83c80\" (UID: \"992ec145-8ff5-49a3-aa0f-6bb554b83c80\") " Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.653311 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/992ec145-8ff5-49a3-aa0f-6bb554b83c80-kube-api-access-b4298" (OuterVolumeSpecName: "kube-api-access-b4298") pod "992ec145-8ff5-49a3-aa0f-6bb554b83c80" (UID: "992ec145-8ff5-49a3-aa0f-6bb554b83c80"). InnerVolumeSpecName "kube-api-access-b4298". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:04 crc kubenswrapper[4813]: I0320 15:44:04.749147 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4298\" (UniqueName: \"kubernetes.io/projected/992ec145-8ff5-49a3-aa0f-6bb554b83c80-kube-api-access-b4298\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.289405 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" event={"ID":"8191d79b-d424-47ad-823b-a8bb7d9dd351","Type":"ContainerStarted","Data":"f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009"} Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.290069 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" event={"ID":"8191d79b-d424-47ad-823b-a8bb7d9dd351","Type":"ContainerStarted","Data":"f8b8691a0d823f0cfbd7f8f7a61fbdb50cd70bb6d54e4308e68facae790273d5"} Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.290121 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.293140 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" event={"ID":"992ec145-8ff5-49a3-aa0f-6bb554b83c80","Type":"ContainerDied","Data":"346db02fc2368138547aac577137ddec5de90bf9c126a6d7e547245d2b46ee0c"} Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.293197 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="346db02fc2368138547aac577137ddec5de90bf9c126a6d7e547245d2b46ee0c" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.293215 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567024-hvhw8" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.294215 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.294869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" event={"ID":"661bbfc8-f9c7-4991-9432-c7c9f5726a15","Type":"ContainerStarted","Data":"ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6"} Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.294900 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" event={"ID":"661bbfc8-f9c7-4991-9432-c7c9f5726a15","Type":"ContainerStarted","Data":"207a0146074b2084f41ffa4da23b449ce31bc7b77157e22cd4d4fce63a9d9849"} Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.295061 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.307984 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.335082 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" podStartSLOduration=4.33506476 podStartE2EDuration="4.33506476s" podCreationTimestamp="2026-03-20 15:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:44:05.332570943 +0000 UTC m=+374.755273864" watchObservedRunningTime="2026-03-20 15:44:05.33506476 +0000 UTC m=+374.757767621" Mar 20 15:44:05 crc kubenswrapper[4813]: I0320 15:44:05.339408 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" podStartSLOduration=4.339393305 podStartE2EDuration="4.339393305s" podCreationTimestamp="2026-03-20 15:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:44:05.318686754 +0000 UTC m=+374.741389595" watchObservedRunningTime="2026-03-20 15:44:05.339393305 +0000 UTC m=+374.762096146" Mar 20 15:44:11 crc kubenswrapper[4813]: I0320 15:44:10.999971 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Mar 20 15:44:12 crc kubenswrapper[4813]: I0320 15:44:12.338381 4813 generic.go:334] "Generic (PLEG): container finished" podID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerID="3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375" exitCode=0 Mar 20 15:44:12 crc kubenswrapper[4813]: I0320 15:44:12.338447 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" event={"ID":"3088bb81-3f95-4383-bbd5-ef89df01a20f","Type":"ContainerDied","Data":"3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375"} Mar 20 15:44:12 crc kubenswrapper[4813]: I0320 15:44:12.339055 4813 scope.go:117] "RemoveContainer" 
containerID="3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375" Mar 20 15:44:13 crc kubenswrapper[4813]: I0320 15:44:13.345238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" event={"ID":"3088bb81-3f95-4383-bbd5-ef89df01a20f","Type":"ContainerStarted","Data":"98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb"} Mar 20 15:44:13 crc kubenswrapper[4813]: I0320 15:44:13.346106 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:44:13 crc kubenswrapper[4813]: I0320 15:44:13.348358 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:44:16 crc kubenswrapper[4813]: I0320 15:44:16.679996 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Mar 20 15:44:18 crc kubenswrapper[4813]: I0320 15:44:18.420211 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Mar 20 15:44:19 crc kubenswrapper[4813]: I0320 15:44:19.447188 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Mar 20 15:44:21 crc kubenswrapper[4813]: I0320 15:44:21.708295 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-2crxf"] Mar 20 15:44:21 crc kubenswrapper[4813]: I0320 15:44:21.708564 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" podUID="8191d79b-d424-47ad-823b-a8bb7d9dd351" containerName="controller-manager" containerID="cri-o://f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009" gracePeriod=30 Mar 20 15:44:21 crc kubenswrapper[4813]: I0320 15:44:21.729073 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg"] Mar 20 15:44:21 crc kubenswrapper[4813]: I0320 15:44:21.729550 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" podUID="661bbfc8-f9c7-4991-9432-c7c9f5726a15" containerName="route-controller-manager" containerID="cri-o://ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6" gracePeriod=30 Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.266679 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.308351 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.338711 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/661bbfc8-f9c7-4991-9432-c7c9f5726a15-serving-cert\") pod \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.338773 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-client-ca\") pod \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.338797 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28xp8\" (UniqueName: \"kubernetes.io/projected/8191d79b-d424-47ad-823b-a8bb7d9dd351-kube-api-access-28xp8\") pod \"8191d79b-d424-47ad-823b-a8bb7d9dd351\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.338830 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6nr7\" (UniqueName: \"kubernetes.io/projected/661bbfc8-f9c7-4991-9432-c7c9f5726a15-kube-api-access-b6nr7\") pod \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.339510 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-client-ca" (OuterVolumeSpecName: "client-ca") pod "661bbfc8-f9c7-4991-9432-c7c9f5726a15" (UID: "661bbfc8-f9c7-4991-9432-c7c9f5726a15"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.344118 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8191d79b-d424-47ad-823b-a8bb7d9dd351-kube-api-access-28xp8" (OuterVolumeSpecName: "kube-api-access-28xp8") pod "8191d79b-d424-47ad-823b-a8bb7d9dd351" (UID: "8191d79b-d424-47ad-823b-a8bb7d9dd351"). InnerVolumeSpecName "kube-api-access-28xp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.344288 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/661bbfc8-f9c7-4991-9432-c7c9f5726a15-kube-api-access-b6nr7" (OuterVolumeSpecName: "kube-api-access-b6nr7") pod "661bbfc8-f9c7-4991-9432-c7c9f5726a15" (UID: "661bbfc8-f9c7-4991-9432-c7c9f5726a15"). InnerVolumeSpecName "kube-api-access-b6nr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.348098 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/661bbfc8-f9c7-4991-9432-c7c9f5726a15-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "661bbfc8-f9c7-4991-9432-c7c9f5726a15" (UID: "661bbfc8-f9c7-4991-9432-c7c9f5726a15"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.395282 4813 generic.go:334] "Generic (PLEG): container finished" podID="661bbfc8-f9c7-4991-9432-c7c9f5726a15" containerID="ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6" exitCode=0 Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.395361 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" event={"ID":"661bbfc8-f9c7-4991-9432-c7c9f5726a15","Type":"ContainerDied","Data":"ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6"} Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.395391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" event={"ID":"661bbfc8-f9c7-4991-9432-c7c9f5726a15","Type":"ContainerDied","Data":"207a0146074b2084f41ffa4da23b449ce31bc7b77157e22cd4d4fce63a9d9849"} Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.395415 4813 scope.go:117] "RemoveContainer" containerID="ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.395704 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.400283 4813 generic.go:334] "Generic (PLEG): container finished" podID="8191d79b-d424-47ad-823b-a8bb7d9dd351" containerID="f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009" exitCode=0 Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.400320 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" event={"ID":"8191d79b-d424-47ad-823b-a8bb7d9dd351","Type":"ContainerDied","Data":"f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009"} Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.400343 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" event={"ID":"8191d79b-d424-47ad-823b-a8bb7d9dd351","Type":"ContainerDied","Data":"f8b8691a0d823f0cfbd7f8f7a61fbdb50cd70bb6d54e4308e68facae790273d5"} Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.400375 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59d77b64d6-2crxf" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.414087 4813 scope.go:117] "RemoveContainer" containerID="ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6" Mar 20 15:44:22 crc kubenswrapper[4813]: E0320 15:44:22.415069 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6\": container with ID starting with ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6 not found: ID does not exist" containerID="ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.415189 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6"} err="failed to get container status \"ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6\": rpc error: code = NotFound desc = could not find container \"ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6\": container with ID starting with ed80ae06266603d5d45c27b36fc94f8da65720c81412614a4be70405e8fc90d6 not found: ID does not exist" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.415291 4813 scope.go:117] "RemoveContainer" containerID="f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.431381 4813 scope.go:117] "RemoveContainer" containerID="f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009" Mar 20 15:44:22 crc kubenswrapper[4813]: E0320 15:44:22.431944 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009\": container with ID starting with f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009 not found: ID does not exist" containerID="f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.431972 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009"} err="failed to get container status \"f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009\": rpc error: code = NotFound desc = could not find container \"f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009\": container with ID starting with f3c84cb66680e5300a6c670d48f26447b26957cbcd3bd6fd9dac1d624f289009 not found: ID does not exist" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.439359 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-config\") pod \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\" (UID: \"661bbfc8-f9c7-4991-9432-c7c9f5726a15\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.439583 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8191d79b-d424-47ad-823b-a8bb7d9dd351-serving-cert\") pod \"8191d79b-d424-47ad-823b-a8bb7d9dd351\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.439705 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-config\") pod \"8191d79b-d424-47ad-823b-a8bb7d9dd351\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.439808 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-proxy-ca-bundles\") pod \"8191d79b-d424-47ad-823b-a8bb7d9dd351\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440442 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-client-ca\") pod \"8191d79b-d424-47ad-823b-a8bb7d9dd351\" (UID: \"8191d79b-d424-47ad-823b-a8bb7d9dd351\") " Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440700 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/661bbfc8-f9c7-4991-9432-c7c9f5726a15-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440798 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440879 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28xp8\" (UniqueName: \"kubernetes.io/projected/8191d79b-d424-47ad-823b-a8bb7d9dd351-kube-api-access-28xp8\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440956 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6nr7\" (UniqueName: \"kubernetes.io/projected/661bbfc8-f9c7-4991-9432-c7c9f5726a15-kube-api-access-b6nr7\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440050 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-config" (OuterVolumeSpecName: "config") pod "661bbfc8-f9c7-4991-9432-c7c9f5726a15" (UID: "661bbfc8-f9c7-4991-9432-c7c9f5726a15"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440322 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-config" (OuterVolumeSpecName: "config") pod "8191d79b-d424-47ad-823b-a8bb7d9dd351" (UID: "8191d79b-d424-47ad-823b-a8bb7d9dd351"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.440395 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8191d79b-d424-47ad-823b-a8bb7d9dd351" (UID: "8191d79b-d424-47ad-823b-a8bb7d9dd351"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.441795 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-client-ca" (OuterVolumeSpecName: "client-ca") pod "8191d79b-d424-47ad-823b-a8bb7d9dd351" (UID: "8191d79b-d424-47ad-823b-a8bb7d9dd351"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.444447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8191d79b-d424-47ad-823b-a8bb7d9dd351-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8191d79b-d424-47ad-823b-a8bb7d9dd351" (UID: "8191d79b-d424-47ad-823b-a8bb7d9dd351"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.541882 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8191d79b-d424-47ad-823b-a8bb7d9dd351-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.541928 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.541941 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.541956 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8191d79b-d424-47ad-823b-a8bb7d9dd351-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.541968 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/661bbfc8-f9c7-4991-9432-c7c9f5726a15-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.739264 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-2crxf"] Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.748034 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-2crxf"] Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.757365 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg"] Mar 20 15:44:22 crc kubenswrapper[4813]: I0320 15:44:22.765319 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-hb7jg"] Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020137 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-759b4c7f9b-66wxb"] Mar 20 15:44:23 crc kubenswrapper[4813]: E0320 15:44:23.020434 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8191d79b-d424-47ad-823b-a8bb7d9dd351" containerName="controller-manager" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020450 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8191d79b-d424-47ad-823b-a8bb7d9dd351" 
containerName="controller-manager" Mar 20 15:44:23 crc kubenswrapper[4813]: E0320 15:44:23.020469 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="992ec145-8ff5-49a3-aa0f-6bb554b83c80" containerName="oc" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020476 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="992ec145-8ff5-49a3-aa0f-6bb554b83c80" containerName="oc" Mar 20 15:44:23 crc kubenswrapper[4813]: E0320 15:44:23.020514 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="661bbfc8-f9c7-4991-9432-c7c9f5726a15" containerName="route-controller-manager" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020524 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="661bbfc8-f9c7-4991-9432-c7c9f5726a15" containerName="route-controller-manager" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020634 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8191d79b-d424-47ad-823b-a8bb7d9dd351" containerName="controller-manager" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020647 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="992ec145-8ff5-49a3-aa0f-6bb554b83c80" containerName="oc" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.020656 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="661bbfc8-f9c7-4991-9432-c7c9f5726a15" containerName="route-controller-manager" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.021040 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.023665 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x"] Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.024305 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.024946 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.025132 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.026849 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.026964 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.027070 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.027734 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.028343 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.028556 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.028898 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.029277 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.029327 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.029613 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.033689 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-759b4c7f9b-66wxb"] Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.035667 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.037284 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x"] Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048554 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-proxy-ca-bundles\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048600 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-bp98q\" (UniqueName: \"kubernetes.io/projected/ebe64ded-4336-40a1-8b88-c793b0a7f332-kube-api-access-bp98q\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048719 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-client-ca\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048739 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkn4m\" (UniqueName: \"kubernetes.io/projected/b220c77a-c606-4edf-b103-88b10e46a391-kube-api-access-mkn4m\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048801 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-config\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048856 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-client-ca\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048880 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-config\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048918 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b220c77a-c606-4edf-b103-88b10e46a391-serving-cert\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.048946 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebe64ded-4336-40a1-8b88-c793b0a7f332-serving-cert\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.150115 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebe64ded-4336-40a1-8b88-c793b0a7f332-serving-cert\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.150473 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-proxy-ca-bundles\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.150668 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp98q\" (UniqueName: \"kubernetes.io/projected/ebe64ded-4336-40a1-8b88-c793b0a7f332-kube-api-access-bp98q\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.150873 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-client-ca\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151014 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkn4m\" (UniqueName: \"kubernetes.io/projected/b220c77a-c606-4edf-b103-88b10e46a391-kube-api-access-mkn4m\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151244 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-config\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151412 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-client-ca\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151599 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-config\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151752 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b220c77a-c606-4edf-b103-88b10e46a391-serving-cert\") pod 
\"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151868 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-client-ca\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.151824 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-proxy-ca-bundles\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.152118 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-client-ca\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.152750 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-config\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.153031 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-config\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.156210 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b220c77a-c606-4edf-b103-88b10e46a391-serving-cert\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.156261 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebe64ded-4336-40a1-8b88-c793b0a7f332-serving-cert\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.168600 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp98q\" (UniqueName: \"kubernetes.io/projected/ebe64ded-4336-40a1-8b88-c793b0a7f332-kube-api-access-bp98q\") pod \"route-controller-manager-5479c9f4cd-6ht7x\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 
15:44:23.168602 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkn4m\" (UniqueName: \"kubernetes.io/projected/b220c77a-c606-4edf-b103-88b10e46a391-kube-api-access-mkn4m\") pod \"controller-manager-759b4c7f9b-66wxb\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.272911 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="661bbfc8-f9c7-4991-9432-c7c9f5726a15" path="/var/lib/kubelet/pods/661bbfc8-f9c7-4991-9432-c7c9f5726a15/volumes" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.273599 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8191d79b-d424-47ad-823b-a8bb7d9dd351" path="/var/lib/kubelet/pods/8191d79b-d424-47ad-823b-a8bb7d9dd351/volumes" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.335067 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.345261 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.750891 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-759b4c7f9b-66wxb"] Mar 20 15:44:23 crc kubenswrapper[4813]: I0320 15:44:23.804905 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x"] Mar 20 15:44:23 crc kubenswrapper[4813]: W0320 15:44:23.808896 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebe64ded_4336_40a1_8b88_c793b0a7f332.slice/crio-10ceb5d7bf1ab8799d2d239a5bc6fa1f395132126cf941d170b3f2df46c99c61 WatchSource:0}: Error finding container 10ceb5d7bf1ab8799d2d239a5bc6fa1f395132126cf941d170b3f2df46c99c61: Status 404 returned error can't find the container with id 10ceb5d7bf1ab8799d2d239a5bc6fa1f395132126cf941d170b3f2df46c99c61 Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.417902 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" event={"ID":"ebe64ded-4336-40a1-8b88-c793b0a7f332","Type":"ContainerStarted","Data":"95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472"} Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.418183 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" event={"ID":"ebe64ded-4336-40a1-8b88-c793b0a7f332","Type":"ContainerStarted","Data":"10ceb5d7bf1ab8799d2d239a5bc6fa1f395132126cf941d170b3f2df46c99c61"} Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.418205 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.419057 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" event={"ID":"b220c77a-c606-4edf-b103-88b10e46a391","Type":"ContainerStarted","Data":"4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48"} Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 
15:44:24.419095 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" event={"ID":"b220c77a-c606-4edf-b103-88b10e46a391","Type":"ContainerStarted","Data":"d7faaadd8b29903dc940f5b0c43598abb38d446b77d710f611bd4daa4d9e63b8"} Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.419456 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.423273 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.426634 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.438340 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" podStartSLOduration=3.438323843 podStartE2EDuration="3.438323843s" podCreationTimestamp="2026-03-20 15:44:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:44:24.434717267 +0000 UTC m=+393.857420108" watchObservedRunningTime="2026-03-20 15:44:24.438323843 +0000 UTC m=+393.861026684" Mar 20 15:44:24 crc kubenswrapper[4813]: I0320 15:44:24.451050 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" podStartSLOduration=3.451032148 podStartE2EDuration="3.451032148s" podCreationTimestamp="2026-03-20 15:44:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:44:24.450791782 +0000 UTC m=+393.873494623" watchObservedRunningTime="2026-03-20 15:44:24.451032148 +0000 UTC m=+393.873734989" Mar 20 15:44:41 crc kubenswrapper[4813]: I0320 15:44:41.721413 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-759b4c7f9b-66wxb"] Mar 20 15:44:41 crc kubenswrapper[4813]: I0320 15:44:41.722223 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" podUID="b220c77a-c606-4edf-b103-88b10e46a391" containerName="controller-manager" containerID="cri-o://4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48" gracePeriod=30 Mar 20 15:44:41 crc kubenswrapper[4813]: I0320 15:44:41.739190 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x"] Mar 20 15:44:41 crc kubenswrapper[4813]: I0320 15:44:41.739424 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" podUID="ebe64ded-4336-40a1-8b88-c793b0a7f332" containerName="route-controller-manager" containerID="cri-o://95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472" gracePeriod=30 Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.263524 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.295890 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-client-ca\") pod \"ebe64ded-4336-40a1-8b88-c793b0a7f332\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.295993 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp98q\" (UniqueName: \"kubernetes.io/projected/ebe64ded-4336-40a1-8b88-c793b0a7f332-kube-api-access-bp98q\") pod \"ebe64ded-4336-40a1-8b88-c793b0a7f332\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.296082 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebe64ded-4336-40a1-8b88-c793b0a7f332-serving-cert\") pod \"ebe64ded-4336-40a1-8b88-c793b0a7f332\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.296114 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-config\") pod \"ebe64ded-4336-40a1-8b88-c793b0a7f332\" (UID: \"ebe64ded-4336-40a1-8b88-c793b0a7f332\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.296782 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-config" (OuterVolumeSpecName: "config") pod "ebe64ded-4336-40a1-8b88-c793b0a7f332" (UID: "ebe64ded-4336-40a1-8b88-c793b0a7f332"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.296880 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-client-ca" (OuterVolumeSpecName: "client-ca") pod "ebe64ded-4336-40a1-8b88-c793b0a7f332" (UID: "ebe64ded-4336-40a1-8b88-c793b0a7f332"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.300905 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebe64ded-4336-40a1-8b88-c793b0a7f332-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ebe64ded-4336-40a1-8b88-c793b0a7f332" (UID: "ebe64ded-4336-40a1-8b88-c793b0a7f332"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.301013 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe64ded-4336-40a1-8b88-c793b0a7f332-kube-api-access-bp98q" (OuterVolumeSpecName: "kube-api-access-bp98q") pod "ebe64ded-4336-40a1-8b88-c793b0a7f332" (UID: "ebe64ded-4336-40a1-8b88-c793b0a7f332"). InnerVolumeSpecName "kube-api-access-bp98q". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.344814 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.396927 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b220c77a-c606-4edf-b103-88b10e46a391-serving-cert\") pod \"b220c77a-c606-4edf-b103-88b10e46a391\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.397084 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkn4m\" (UniqueName: \"kubernetes.io/projected/b220c77a-c606-4edf-b103-88b10e46a391-kube-api-access-mkn4m\") pod \"b220c77a-c606-4edf-b103-88b10e46a391\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.397711 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-config\") pod \"b220c77a-c606-4edf-b103-88b10e46a391\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.397773 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-client-ca\") pod \"b220c77a-c606-4edf-b103-88b10e46a391\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.397812 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-proxy-ca-bundles\") pod \"b220c77a-c606-4edf-b103-88b10e46a391\" (UID: \"b220c77a-c606-4edf-b103-88b10e46a391\") " Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398124 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebe64ded-4336-40a1-8b88-c793b0a7f332-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398141 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398134 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-client-ca" (OuterVolumeSpecName: "client-ca") pod "b220c77a-c606-4edf-b103-88b10e46a391" (UID: "b220c77a-c606-4edf-b103-88b10e46a391"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398154 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebe64ded-4336-40a1-8b88-c793b0a7f332-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398206 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp98q\" (UniqueName: \"kubernetes.io/projected/ebe64ded-4336-40a1-8b88-c793b0a7f332-kube-api-access-bp98q\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398588 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-config" (OuterVolumeSpecName: "config") pod "b220c77a-c606-4edf-b103-88b10e46a391" (UID: "b220c77a-c606-4edf-b103-88b10e46a391"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.398626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b220c77a-c606-4edf-b103-88b10e46a391" (UID: "b220c77a-c606-4edf-b103-88b10e46a391"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.399787 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b220c77a-c606-4edf-b103-88b10e46a391-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b220c77a-c606-4edf-b103-88b10e46a391" (UID: "b220c77a-c606-4edf-b103-88b10e46a391"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.400742 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b220c77a-c606-4edf-b103-88b10e46a391-kube-api-access-mkn4m" (OuterVolumeSpecName: "kube-api-access-mkn4m") pod "b220c77a-c606-4edf-b103-88b10e46a391" (UID: "b220c77a-c606-4edf-b103-88b10e46a391"). InnerVolumeSpecName "kube-api-access-mkn4m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.499418 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkn4m\" (UniqueName: \"kubernetes.io/projected/b220c77a-c606-4edf-b103-88b10e46a391-kube-api-access-mkn4m\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.499450 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.499460 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.499470 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b220c77a-c606-4edf-b103-88b10e46a391-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.499493 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b220c77a-c606-4edf-b103-88b10e46a391-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.523727 4813 generic.go:334] "Generic (PLEG): container finished" podID="ebe64ded-4336-40a1-8b88-c793b0a7f332" containerID="95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472" exitCode=0 Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.523829 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" event={"ID":"ebe64ded-4336-40a1-8b88-c793b0a7f332","Type":"ContainerDied","Data":"95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472"} Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.523861 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" event={"ID":"ebe64ded-4336-40a1-8b88-c793b0a7f332","Type":"ContainerDied","Data":"10ceb5d7bf1ab8799d2d239a5bc6fa1f395132126cf941d170b3f2df46c99c61"} Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.523879 4813 scope.go:117] "RemoveContainer" containerID="95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.523828 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.526518 4813 generic.go:334] "Generic (PLEG): container finished" podID="b220c77a-c606-4edf-b103-88b10e46a391" containerID="4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48" exitCode=0 Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.526556 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" event={"ID":"b220c77a-c606-4edf-b103-88b10e46a391","Type":"ContainerDied","Data":"4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48"} Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.526589 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" event={"ID":"b220c77a-c606-4edf-b103-88b10e46a391","Type":"ContainerDied","Data":"d7faaadd8b29903dc940f5b0c43598abb38d446b77d710f611bd4daa4d9e63b8"} Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.526675 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-759b4c7f9b-66wxb" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.551758 4813 scope.go:117] "RemoveContainer" containerID="95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472" Mar 20 15:44:42 crc kubenswrapper[4813]: E0320 15:44:42.552666 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472\": container with ID starting with 95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472 not found: ID does not exist" containerID="95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.552709 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472"} err="failed to get container status \"95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472\": rpc error: code = NotFound desc = could not find container \"95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472\": container with ID starting with 95ef600d6d3715c183448924e5a649364a52457d0236f9fc04732e2f26823472 not found: ID does not exist" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.552736 4813 scope.go:117] "RemoveContainer" containerID="4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.557532 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x"] Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.561421 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5479c9f4cd-6ht7x"] Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.573365 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-759b4c7f9b-66wxb"] Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.574691 4813 scope.go:117] "RemoveContainer" containerID="4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48" Mar 20 15:44:42 crc kubenswrapper[4813]: E0320 15:44:42.575284 4813 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48\": container with ID starting with 4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48 not found: ID does not exist" containerID="4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.575410 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48"} err="failed to get container status \"4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48\": rpc error: code = NotFound desc = could not find container \"4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48\": container with ID starting with 4776adc1364a303d1b43446a30cfd66be6ef775a6ba9d0ac8e31031397353a48 not found: ID does not exist" Mar 20 15:44:42 crc kubenswrapper[4813]: I0320 15:44:42.576800 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-759b4c7f9b-66wxb"] Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.030997 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-8rzs8"] Mar 20 15:44:43 crc kubenswrapper[4813]: E0320 15:44:43.031569 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b220c77a-c606-4edf-b103-88b10e46a391" containerName="controller-manager" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.031587 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b220c77a-c606-4edf-b103-88b10e46a391" containerName="controller-manager" Mar 20 15:44:43 crc kubenswrapper[4813]: E0320 15:44:43.031600 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe64ded-4336-40a1-8b88-c793b0a7f332" containerName="route-controller-manager" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.031607 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe64ded-4336-40a1-8b88-c793b0a7f332" containerName="route-controller-manager" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.031727 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe64ded-4336-40a1-8b88-c793b0a7f332" containerName="route-controller-manager" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.031745 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b220c77a-c606-4edf-b103-88b10e46a391" containerName="controller-manager" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.032203 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.033827 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.034268 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.034287 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.034915 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.036213 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.036769 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz"] Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.037808 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.041749 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.041939 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.042021 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.042068 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.042206 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.042840 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.043187 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.044511 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-8rzs8"] Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.048608 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.054341 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz"] Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106214 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-config\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106270 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-serving-cert\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106415 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vb58\" (UniqueName: \"kubernetes.io/projected/264536e9-9ded-4743-a26e-24cd5a55e2ac-kube-api-access-6vb58\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106499 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-client-ca\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106535 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/264536e9-9ded-4743-a26e-24cd5a55e2ac-serving-cert\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106564 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-client-ca\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106601 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-proxy-ca-bundles\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106628 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfbgl\" (UniqueName: \"kubernetes.io/projected/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-kube-api-access-dfbgl\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.106696 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-config\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.207926 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-client-ca\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.207965 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-proxy-ca-bundles\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.207990 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfbgl\" (UniqueName: \"kubernetes.io/projected/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-kube-api-access-dfbgl\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.208024 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-config\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.208088 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-config\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.208109 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-serving-cert\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.208141 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vb58\" (UniqueName: \"kubernetes.io/projected/264536e9-9ded-4743-a26e-24cd5a55e2ac-kube-api-access-6vb58\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.208166 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-client-ca\") pod 
\"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.208189 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/264536e9-9ded-4743-a26e-24cd5a55e2ac-serving-cert\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.209297 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-proxy-ca-bundles\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.209636 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-client-ca\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.209660 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-client-ca\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.210556 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-config\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.210800 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/264536e9-9ded-4743-a26e-24cd5a55e2ac-config\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.212112 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-serving-cert\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.212973 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/264536e9-9ded-4743-a26e-24cd5a55e2ac-serving-cert\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.226586 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfbgl\" (UniqueName: \"kubernetes.io/projected/d2339af9-dd9c-4eb9-9774-2209e9b46f5e-kube-api-access-dfbgl\") pod \"route-controller-manager-6f8d66c474-n6cpz\" (UID: \"d2339af9-dd9c-4eb9-9774-2209e9b46f5e\") " pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.230055 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vb58\" (UniqueName: \"kubernetes.io/projected/264536e9-9ded-4743-a26e-24cd5a55e2ac-kube-api-access-6vb58\") pod \"controller-manager-59d77b64d6-8rzs8\" (UID: \"264536e9-9ded-4743-a26e-24cd5a55e2ac\") " pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.273970 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b220c77a-c606-4edf-b103-88b10e46a391" path="/var/lib/kubelet/pods/b220c77a-c606-4edf-b103-88b10e46a391/volumes" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.274767 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe64ded-4336-40a1-8b88-c793b0a7f332" path="/var/lib/kubelet/pods/ebe64ded-4336-40a1-8b88-c793b0a7f332/volumes" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.356746 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.379290 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.616835 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz"] Mar 20 15:44:43 crc kubenswrapper[4813]: W0320 15:44:43.780691 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod264536e9_9ded_4743_a26e_24cd5a55e2ac.slice/crio-9ef9b9c6f18e0f1ec625f7d006edcace812600616a6d1fdfc542f1607de4aa24 WatchSource:0}: Error finding container 9ef9b9c6f18e0f1ec625f7d006edcace812600616a6d1fdfc542f1607de4aa24: Status 404 returned error can't find the container with id 9ef9b9c6f18e0f1ec625f7d006edcace812600616a6d1fdfc542f1607de4aa24 Mar 20 15:44:43 crc kubenswrapper[4813]: I0320 15:44:43.781057 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59d77b64d6-8rzs8"] Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.538797 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" event={"ID":"264536e9-9ded-4743-a26e-24cd5a55e2ac","Type":"ContainerStarted","Data":"05d3e08a81cd005abebec0cdf338f0ac723f1d7d7501596f013795f2143fd225"} Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.538845 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" event={"ID":"264536e9-9ded-4743-a26e-24cd5a55e2ac","Type":"ContainerStarted","Data":"9ef9b9c6f18e0f1ec625f7d006edcace812600616a6d1fdfc542f1607de4aa24"} Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.539115 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" 
Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.541616 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" event={"ID":"d2339af9-dd9c-4eb9-9774-2209e9b46f5e","Type":"ContainerStarted","Data":"afca31e043a67fd527dc70e86ae46461afa1e4c80b69cf21a4156e360afc2753"} Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.541647 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" event={"ID":"d2339af9-dd9c-4eb9-9774-2209e9b46f5e","Type":"ContainerStarted","Data":"0fc2e9b1b1910d6486fd80a0fb018422537a4624906a0529449dc84d43d886a8"} Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.541855 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.545803 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.548529 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.561613 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-59d77b64d6-8rzs8" podStartSLOduration=3.561587942 podStartE2EDuration="3.561587942s" podCreationTimestamp="2026-03-20 15:44:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:44:44.561471829 +0000 UTC m=+413.984174670" watchObservedRunningTime="2026-03-20 15:44:44.561587942 +0000 UTC m=+413.984290863" Mar 20 15:44:44 crc kubenswrapper[4813]: I0320 15:44:44.579923 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6f8d66c474-n6cpz" podStartSLOduration=3.579905875 podStartE2EDuration="3.579905875s" podCreationTimestamp="2026-03-20 15:44:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:44:44.57854824 +0000 UTC m=+414.001251081" watchObservedRunningTime="2026-03-20 15:44:44.579905875 +0000 UTC m=+414.002608716" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.132563 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq"] Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.133932 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.138000 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.139104 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.180837 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq"] Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.276984 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87gj6\" (UniqueName: \"kubernetes.io/projected/81d21365-a70e-4490-a592-ee8f126e1e61-kube-api-access-87gj6\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.277129 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81d21365-a70e-4490-a592-ee8f126e1e61-secret-volume\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.277168 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81d21365-a70e-4490-a592-ee8f126e1e61-config-volume\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.377998 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81d21365-a70e-4490-a592-ee8f126e1e61-secret-volume\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.378041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81d21365-a70e-4490-a592-ee8f126e1e61-config-volume\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.378076 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87gj6\" (UniqueName: \"kubernetes.io/projected/81d21365-a70e-4490-a592-ee8f126e1e61-kube-api-access-87gj6\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.378930 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81d21365-a70e-4490-a592-ee8f126e1e61-config-volume\") pod 
\"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.384274 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81d21365-a70e-4490-a592-ee8f126e1e61-secret-volume\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.395895 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87gj6\" (UniqueName: \"kubernetes.io/projected/81d21365-a70e-4490-a592-ee8f126e1e61-kube-api-access-87gj6\") pod \"collect-profiles-29567025-flhmq\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.456084 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:00 crc kubenswrapper[4813]: I0320 15:45:00.871830 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq"] Mar 20 15:45:01 crc kubenswrapper[4813]: I0320 15:45:01.644397 4813 generic.go:334] "Generic (PLEG): container finished" podID="81d21365-a70e-4490-a592-ee8f126e1e61" containerID="7cceb3b630aa7f2df354e65c2c9161a1ef91016618af00665a7da30e2f6daf89" exitCode=0 Mar 20 15:45:01 crc kubenswrapper[4813]: I0320 15:45:01.644568 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" event={"ID":"81d21365-a70e-4490-a592-ee8f126e1e61","Type":"ContainerDied","Data":"7cceb3b630aa7f2df354e65c2c9161a1ef91016618af00665a7da30e2f6daf89"} Mar 20 15:45:01 crc kubenswrapper[4813]: I0320 15:45:01.644769 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" event={"ID":"81d21365-a70e-4490-a592-ee8f126e1e61","Type":"ContainerStarted","Data":"53d2217ce433c3eda4528776dbc123b5795ad70be771a3a0df5ae3ea252d58a2"} Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.026631 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.115131 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81d21365-a70e-4490-a592-ee8f126e1e61-secret-volume\") pod \"81d21365-a70e-4490-a592-ee8f126e1e61\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.115215 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81d21365-a70e-4490-a592-ee8f126e1e61-config-volume\") pod \"81d21365-a70e-4490-a592-ee8f126e1e61\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.115236 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87gj6\" (UniqueName: \"kubernetes.io/projected/81d21365-a70e-4490-a592-ee8f126e1e61-kube-api-access-87gj6\") pod \"81d21365-a70e-4490-a592-ee8f126e1e61\" (UID: \"81d21365-a70e-4490-a592-ee8f126e1e61\") " Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.116225 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81d21365-a70e-4490-a592-ee8f126e1e61-config-volume" (OuterVolumeSpecName: "config-volume") pod "81d21365-a70e-4490-a592-ee8f126e1e61" (UID: "81d21365-a70e-4490-a592-ee8f126e1e61"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.120467 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81d21365-a70e-4490-a592-ee8f126e1e61-kube-api-access-87gj6" (OuterVolumeSpecName: "kube-api-access-87gj6") pod "81d21365-a70e-4490-a592-ee8f126e1e61" (UID: "81d21365-a70e-4490-a592-ee8f126e1e61"). InnerVolumeSpecName "kube-api-access-87gj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.120656 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d21365-a70e-4490-a592-ee8f126e1e61-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "81d21365-a70e-4490-a592-ee8f126e1e61" (UID: "81d21365-a70e-4490-a592-ee8f126e1e61"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.216784 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81d21365-a70e-4490-a592-ee8f126e1e61-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.216867 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81d21365-a70e-4490-a592-ee8f126e1e61-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.216897 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87gj6\" (UniqueName: \"kubernetes.io/projected/81d21365-a70e-4490-a592-ee8f126e1e61-kube-api-access-87gj6\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.332459 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qb57x"] Mar 20 15:45:03 crc kubenswrapper[4813]: E0320 15:45:03.332706 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d21365-a70e-4490-a592-ee8f126e1e61" containerName="collect-profiles" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.332719 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d21365-a70e-4490-a592-ee8f126e1e61" containerName="collect-profiles" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.332804 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d21365-a70e-4490-a592-ee8f126e1e61" containerName="collect-profiles" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.333163 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.354319 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qb57x"] Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419417 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704968cf-4e29-45ce-9b8b-610920e4a5b1-trusted-ca\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419544 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704968cf-4e29-45ce-9b8b-610920e4a5b1-registry-certificates\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419565 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-bound-sa-token\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419590 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-registry-tls\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419628 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704968cf-4e29-45ce-9b8b-610920e4a5b1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419762 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704968cf-4e29-45ce-9b8b-610920e4a5b1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419854 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.419906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgjnn\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-kube-api-access-lgjnn\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.445506 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520607 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704968cf-4e29-45ce-9b8b-610920e4a5b1-trusted-ca\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520655 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704968cf-4e29-45ce-9b8b-610920e4a5b1-registry-certificates\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-bound-sa-token\") pod 
\"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520695 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-registry-tls\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520714 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704968cf-4e29-45ce-9b8b-610920e4a5b1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520738 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704968cf-4e29-45ce-9b8b-610920e4a5b1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.520767 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgjnn\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-kube-api-access-lgjnn\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.521816 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704968cf-4e29-45ce-9b8b-610920e4a5b1-trusted-ca\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.522058 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704968cf-4e29-45ce-9b8b-610920e4a5b1-registry-certificates\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.522330 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704968cf-4e29-45ce-9b8b-610920e4a5b1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.524533 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-registry-tls\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.526506 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704968cf-4e29-45ce-9b8b-610920e4a5b1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.538240 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-bound-sa-token\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.540905 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgjnn\" (UniqueName: \"kubernetes.io/projected/704968cf-4e29-45ce-9b8b-610920e4a5b1-kube-api-access-lgjnn\") pod \"image-registry-66df7c8f76-qb57x\" (UID: \"704968cf-4e29-45ce-9b8b-610920e4a5b1\") " pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.656238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" event={"ID":"81d21365-a70e-4490-a592-ee8f126e1e61","Type":"ContainerDied","Data":"53d2217ce433c3eda4528776dbc123b5795ad70be771a3a0df5ae3ea252d58a2"} Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.656281 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53d2217ce433c3eda4528776dbc123b5795ad70be771a3a0df5ae3ea252d58a2" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.656380 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq" Mar 20 15:45:03 crc kubenswrapper[4813]: I0320 15:45:03.657279 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:04 crc kubenswrapper[4813]: I0320 15:45:04.047934 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qb57x"] Mar 20 15:45:04 crc kubenswrapper[4813]: W0320 15:45:04.054331 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod704968cf_4e29_45ce_9b8b_610920e4a5b1.slice/crio-e4cbb98f9ef69cfc90834877ba8729e992cb92ea238e115848f95055d363f470 WatchSource:0}: Error finding container e4cbb98f9ef69cfc90834877ba8729e992cb92ea238e115848f95055d363f470: Status 404 returned error can't find the container with id e4cbb98f9ef69cfc90834877ba8729e992cb92ea238e115848f95055d363f470 Mar 20 15:45:04 crc kubenswrapper[4813]: I0320 15:45:04.663176 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" event={"ID":"704968cf-4e29-45ce-9b8b-610920e4a5b1","Type":"ContainerStarted","Data":"157aa9f6a8a58b19e29ba9ab8b2ec7c409be84dab399e685f4cf0693d5b993c1"} Mar 20 15:45:04 crc kubenswrapper[4813]: I0320 15:45:04.663226 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" event={"ID":"704968cf-4e29-45ce-9b8b-610920e4a5b1","Type":"ContainerStarted","Data":"e4cbb98f9ef69cfc90834877ba8729e992cb92ea238e115848f95055d363f470"} Mar 20 15:45:04 crc kubenswrapper[4813]: I0320 15:45:04.663333 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:04 crc kubenswrapper[4813]: I0320 15:45:04.686148 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" podStartSLOduration=1.6861278020000001 podStartE2EDuration="1.686127802s" podCreationTimestamp="2026-03-20 15:45:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:45:04.686087071 +0000 UTC m=+434.108789922" watchObservedRunningTime="2026-03-20 15:45:04.686127802 +0000 UTC m=+434.108830643" Mar 20 15:45:23 crc kubenswrapper[4813]: I0320 15:45:23.664578 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qb57x" Mar 20 15:45:23 crc kubenswrapper[4813]: I0320 15:45:23.727016 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b4swq"] Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.965006 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b777z"] Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.965944 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-b777z" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="registry-server" containerID="cri-o://42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4" gracePeriod=30 Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.974811 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wsh8b"] Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.976742 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wsh8b" 
podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="registry-server" containerID="cri-o://dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979" gracePeriod=30 Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.979967 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crtz5"] Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.980262 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" containerID="cri-o://98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb" gracePeriod=30 Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.996429 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-grn6l"] Mar 20 15:45:30 crc kubenswrapper[4813]: I0320 15:45:30.996860 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-grn6l" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="registry-server" containerID="cri-o://1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb" gracePeriod=30 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.000589 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qrqgm"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.010061 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.018068 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qrqgm"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.028575 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wv9qh"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.028881 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wv9qh" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="registry-server" containerID="cri-o://fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb" gracePeriod=30 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.115274 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g67lh\" (UniqueName: \"kubernetes.io/projected/d700a123-9a6b-4d44-89b2-73e09fe026b3-kube-api-access-g67lh\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.115336 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d700a123-9a6b-4d44-89b2-73e09fe026b3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.115368 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/d700a123-9a6b-4d44-89b2-73e09fe026b3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.216445 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g67lh\" (UniqueName: \"kubernetes.io/projected/d700a123-9a6b-4d44-89b2-73e09fe026b3-kube-api-access-g67lh\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.216531 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d700a123-9a6b-4d44-89b2-73e09fe026b3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.216577 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d700a123-9a6b-4d44-89b2-73e09fe026b3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.218788 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d700a123-9a6b-4d44-89b2-73e09fe026b3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.223024 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d700a123-9a6b-4d44-89b2-73e09fe026b3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.235709 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g67lh\" (UniqueName: \"kubernetes.io/projected/d700a123-9a6b-4d44-89b2-73e09fe026b3-kube-api-access-g67lh\") pod \"marketplace-operator-79b997595-qrqgm\" (UID: \"d700a123-9a6b-4d44-89b2-73e09fe026b3\") " pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.338308 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.434035 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.518748 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.531186 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.535852 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.568985 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.624923 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-utilities\") pod \"6f6910f6-780a-428c-a21b-f6702a912af1\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.624960 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtt8x\" (UniqueName: \"kubernetes.io/projected/7c3f7704-b939-4e16-b4fb-1addffc3091d-kube-api-access-vtt8x\") pod \"7c3f7704-b939-4e16-b4fb-1addffc3091d\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.624981 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x6tn\" (UniqueName: \"kubernetes.io/projected/3088bb81-3f95-4383-bbd5-ef89df01a20f-kube-api-access-8x6tn\") pod \"3088bb81-3f95-4383-bbd5-ef89df01a20f\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.624998 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-utilities\") pod \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625021 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-utilities\") pod \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625036 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-catalog-content\") pod \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625055 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-operator-metrics\") pod \"3088bb81-3f95-4383-bbd5-ef89df01a20f\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625075 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-utilities\") pod \"7c3f7704-b939-4e16-b4fb-1addffc3091d\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " Mar 20 15:45:31 crc 
kubenswrapper[4813]: I0320 15:45:31.625093 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z49s7\" (UniqueName: \"kubernetes.io/projected/6f6910f6-780a-428c-a21b-f6702a912af1-kube-api-access-z49s7\") pod \"6f6910f6-780a-428c-a21b-f6702a912af1\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625124 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-catalog-content\") pod \"7c3f7704-b939-4e16-b4fb-1addffc3091d\" (UID: \"7c3f7704-b939-4e16-b4fb-1addffc3091d\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625144 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-catalog-content\") pod \"6f6910f6-780a-428c-a21b-f6702a912af1\" (UID: \"6f6910f6-780a-428c-a21b-f6702a912af1\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625166 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vk4d\" (UniqueName: \"kubernetes.io/projected/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-kube-api-access-7vk4d\") pod \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625182 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-catalog-content\") pod \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\" (UID: \"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625198 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qv4n\" (UniqueName: \"kubernetes.io/projected/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-kube-api-access-6qv4n\") pod \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\" (UID: \"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625222 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-trusted-ca\") pod \"3088bb81-3f95-4383-bbd5-ef89df01a20f\" (UID: \"3088bb81-3f95-4383-bbd5-ef89df01a20f\") " Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625886 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "3088bb81-3f95-4383-bbd5-ef89df01a20f" (UID: "3088bb81-3f95-4383-bbd5-ef89df01a20f"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.625969 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-utilities" (OuterVolumeSpecName: "utilities") pod "6f6910f6-780a-428c-a21b-f6702a912af1" (UID: "6f6910f6-780a-428c-a21b-f6702a912af1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.626909 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-utilities" (OuterVolumeSpecName: "utilities") pod "6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" (UID: "6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.626972 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-utilities" (OuterVolumeSpecName: "utilities") pod "7c3f7704-b939-4e16-b4fb-1addffc3091d" (UID: "7c3f7704-b939-4e16-b4fb-1addffc3091d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.627887 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-utilities" (OuterVolumeSpecName: "utilities") pod "e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" (UID: "e0d612e1-08ef-4c5c-9e8c-f2636e9fa705"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.632592 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "3088bb81-3f95-4383-bbd5-ef89df01a20f" (UID: "3088bb81-3f95-4383-bbd5-ef89df01a20f"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.632776 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3088bb81-3f95-4383-bbd5-ef89df01a20f-kube-api-access-8x6tn" (OuterVolumeSpecName: "kube-api-access-8x6tn") pod "3088bb81-3f95-4383-bbd5-ef89df01a20f" (UID: "3088bb81-3f95-4383-bbd5-ef89df01a20f"). InnerVolumeSpecName "kube-api-access-8x6tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.632848 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-kube-api-access-7vk4d" (OuterVolumeSpecName: "kube-api-access-7vk4d") pod "6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" (UID: "6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f"). InnerVolumeSpecName "kube-api-access-7vk4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.634558 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-kube-api-access-6qv4n" (OuterVolumeSpecName: "kube-api-access-6qv4n") pod "e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" (UID: "e0d612e1-08ef-4c5c-9e8c-f2636e9fa705"). InnerVolumeSpecName "kube-api-access-6qv4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.634671 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f6910f6-780a-428c-a21b-f6702a912af1-kube-api-access-z49s7" (OuterVolumeSpecName: "kube-api-access-z49s7") pod "6f6910f6-780a-428c-a21b-f6702a912af1" (UID: "6f6910f6-780a-428c-a21b-f6702a912af1"). InnerVolumeSpecName "kube-api-access-z49s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.646769 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c3f7704-b939-4e16-b4fb-1addffc3091d-kube-api-access-vtt8x" (OuterVolumeSpecName: "kube-api-access-vtt8x") pod "7c3f7704-b939-4e16-b4fb-1addffc3091d" (UID: "7c3f7704-b939-4e16-b4fb-1addffc3091d"). InnerVolumeSpecName "kube-api-access-vtt8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.655158 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" (UID: "e0d612e1-08ef-4c5c-9e8c-f2636e9fa705"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.687929 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f6910f6-780a-428c-a21b-f6702a912af1" (UID: "6f6910f6-780a-428c-a21b-f6702a912af1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.699408 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" (UID: "6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726129 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726165 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z49s7\" (UniqueName: \"kubernetes.io/projected/6f6910f6-780a-428c-a21b-f6702a912af1-kube-api-access-z49s7\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726179 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726191 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vk4d\" (UniqueName: \"kubernetes.io/projected/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-kube-api-access-7vk4d\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726204 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726215 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qv4n\" (UniqueName: \"kubernetes.io/projected/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-kube-api-access-6qv4n\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726226 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726238 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f6910f6-780a-428c-a21b-f6702a912af1-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726249 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtt8x\" (UniqueName: \"kubernetes.io/projected/7c3f7704-b939-4e16-b4fb-1addffc3091d-kube-api-access-vtt8x\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726260 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x6tn\" (UniqueName: \"kubernetes.io/projected/3088bb81-3f95-4383-bbd5-ef89df01a20f-kube-api-access-8x6tn\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726270 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726281 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726293 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.726305 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3088bb81-3f95-4383-bbd5-ef89df01a20f-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.771853 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c3f7704-b939-4e16-b4fb-1addffc3091d" (UID: "7c3f7704-b939-4e16-b4fb-1addffc3091d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.824810 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qrqgm"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.827601 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c3f7704-b939-4e16-b4fb-1addffc3091d-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.833782 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grn6l" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.833901 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grn6l" event={"ID":"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705","Type":"ContainerDied","Data":"1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.833718 4813 generic.go:334] "Generic (PLEG): container finished" podID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerID="1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb" exitCode=0 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.834085 4813 scope.go:117] "RemoveContainer" containerID="1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.834243 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grn6l" event={"ID":"e0d612e1-08ef-4c5c-9e8c-f2636e9fa705","Type":"ContainerDied","Data":"3eb278335fa28c5b292a5f20e44852a7a12c0901b223128a6db35ee26dafdc74"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.839109 4813 generic.go:334] "Generic (PLEG): container finished" podID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerID="dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979" exitCode=0 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.839162 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wsh8b" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.839180 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerDied","Data":"dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.839529 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wsh8b" event={"ID":"6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f","Type":"ContainerDied","Data":"dc4dff779584a325ff3cb25eb59c9a1b997cad6b681800bfc02b4d38ea7c6cc5"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.848071 4813 generic.go:334] "Generic (PLEG): container finished" podID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerID="fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb" exitCode=0 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.848125 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerDied","Data":"fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.848149 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wv9qh" event={"ID":"7c3f7704-b939-4e16-b4fb-1addffc3091d","Type":"ContainerDied","Data":"4177cc3b1bb1d493ec4e172b0295e6941d7f5bdc60870fd4ae3c181ac3f118c9"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.848223 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wv9qh" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.851839 4813 scope.go:117] "RemoveContainer" containerID="53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.861069 4813 generic.go:334] "Generic (PLEG): container finished" podID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerID="98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb" exitCode=0 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.861219 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" event={"ID":"3088bb81-3f95-4383-bbd5-ef89df01a20f","Type":"ContainerDied","Data":"98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.861337 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" event={"ID":"3088bb81-3f95-4383-bbd5-ef89df01a20f","Type":"ContainerDied","Data":"49ab4f97d30558b888bfe2192cf038288fefa3b23912c8809195e1cc65e05656"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.861552 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crtz5" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.864745 4813 generic.go:334] "Generic (PLEG): container finished" podID="6f6910f6-780a-428c-a21b-f6702a912af1" containerID="42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4" exitCode=0 Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.864786 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b777z" event={"ID":"6f6910f6-780a-428c-a21b-f6702a912af1","Type":"ContainerDied","Data":"42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.864812 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b777z" event={"ID":"6f6910f6-780a-428c-a21b-f6702a912af1","Type":"ContainerDied","Data":"354b2c37d8f887b53dd812c768944bf6901dae294178cb9a243874681b1a01ad"} Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.864887 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b777z" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.897998 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-grn6l"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.900020 4813 scope.go:117] "RemoveContainer" containerID="a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.908404 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-grn6l"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.915453 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wsh8b"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.918622 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wsh8b"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.923866 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wv9qh"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.928702 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wv9qh"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.934641 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crtz5"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.941367 4813 scope.go:117] "RemoveContainer" containerID="1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.941507 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crtz5"] Mar 20 15:45:31 crc kubenswrapper[4813]: E0320 15:45:31.941911 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb\": container with ID starting with 1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb not found: ID does not exist" containerID="1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.941943 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb"} err="failed to get container status \"1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb\": rpc error: code = NotFound desc = could not find container \"1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb\": container with ID starting with 1018178df4d2ec46ce00f3fdcfb052809992c2b8694fb56564d51ea14cdfd7bb not found: ID does not exist" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.941970 4813 scope.go:117] "RemoveContainer" containerID="53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb" Mar 20 15:45:31 crc kubenswrapper[4813]: E0320 15:45:31.942268 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb\": container with ID starting with 53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb not found: ID does not exist" containerID="53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.942291 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb"} err="failed to get container status \"53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb\": rpc error: code = NotFound desc = could not find container \"53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb\": container with ID starting with 53d5fc0fa9bcaaf68340a8900fa07b65187953aafe6b482f83eb8e16f4d8adfb not found: ID does not exist" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.942307 4813 scope.go:117] "RemoveContainer" containerID="a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2" Mar 20 15:45:31 crc kubenswrapper[4813]: E0320 15:45:31.942701 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2\": container with ID starting with a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2 not found: ID does not exist" containerID="a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.942724 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2"} err="failed to get container status \"a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2\": rpc error: code = NotFound desc = could not find container \"a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2\": container with ID starting with a8a7ef53d729d3875cedbd75b374194d2d108d720a7b81ed553f5da318b8e2d2 not found: ID does not exist" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.942739 4813 scope.go:117] "RemoveContainer" containerID="dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.943754 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b777z"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.950947 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-b777z"] Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.969068 4813 scope.go:117] 
"RemoveContainer" containerID="f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b" Mar 20 15:45:31 crc kubenswrapper[4813]: I0320 15:45:31.988039 4813 scope.go:117] "RemoveContainer" containerID="a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.006146 4813 scope.go:117] "RemoveContainer" containerID="dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.007021 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979\": container with ID starting with dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979 not found: ID does not exist" containerID="dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.007074 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979"} err="failed to get container status \"dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979\": rpc error: code = NotFound desc = could not find container \"dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979\": container with ID starting with dba3da74bfd4abe1c85987e34503a7b956887db12d23f3af9900c4a783cbc979 not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.007110 4813 scope.go:117] "RemoveContainer" containerID="f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.007650 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b\": container with ID starting with f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b not found: ID does not exist" containerID="f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.007687 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b"} err="failed to get container status \"f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b\": rpc error: code = NotFound desc = could not find container \"f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b\": container with ID starting with f32dc580da5e9b62e08dcd7bf87603bfd45b742e06446c4c1c04078cc04aeb0b not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.007714 4813 scope.go:117] "RemoveContainer" containerID="a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.008002 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760\": container with ID starting with a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760 not found: ID does not exist" containerID="a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.008024 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760"} err="failed to get container status \"a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760\": rpc error: code = NotFound desc = could not find container \"a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760\": container with ID starting with a308332436565cbb7025783bc95eb07eccc13fea2526f3dd0804502efe2da760 not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.008038 4813 scope.go:117] "RemoveContainer" containerID="fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.020305 4813 scope.go:117] "RemoveContainer" containerID="c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.043467 4813 scope.go:117] "RemoveContainer" containerID="3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.063184 4813 scope.go:117] "RemoveContainer" containerID="fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.064680 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb\": container with ID starting with fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb not found: ID does not exist" containerID="fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.064726 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb"} err="failed to get container status \"fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb\": rpc error: code = NotFound desc = could not find container \"fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb\": container with ID starting with fdf07dbbd2d7e8d7c85984119fea287918906c63f2d1c00b57b8550eb0284feb not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.064757 4813 scope.go:117] "RemoveContainer" containerID="c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.065086 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937\": container with ID starting with c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937 not found: ID does not exist" containerID="c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.065113 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937"} err="failed to get container status \"c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937\": rpc error: code = NotFound desc = could not find container \"c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937\": container with ID starting with c05aee1d919f371359d275f127929163b0e5f979f1b406ec56252b26acce5937 not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.065129 4813 
scope.go:117] "RemoveContainer" containerID="3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.065422 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941\": container with ID starting with 3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941 not found: ID does not exist" containerID="3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.065449 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941"} err="failed to get container status \"3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941\": rpc error: code = NotFound desc = could not find container \"3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941\": container with ID starting with 3c284d4eb3cea4339467fc9376917dd4c7f678767c76df7bb605bfb900cb0941 not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.065463 4813 scope.go:117] "RemoveContainer" containerID="98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.078880 4813 scope.go:117] "RemoveContainer" containerID="3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.094892 4813 scope.go:117] "RemoveContainer" containerID="98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.095328 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb\": container with ID starting with 98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb not found: ID does not exist" containerID="98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.095361 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb"} err="failed to get container status \"98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb\": rpc error: code = NotFound desc = could not find container \"98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb\": container with ID starting with 98f0bf3ddfbeb3e33b193266a1756f28f7744b01e61927ce37f4dedf2702b6eb not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.095383 4813 scope.go:117] "RemoveContainer" containerID="3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.096338 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375\": container with ID starting with 3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375 not found: ID does not exist" containerID="3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.096379 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375"} err="failed to get container status \"3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375\": rpc error: code = NotFound desc = could not find container \"3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375\": container with ID starting with 3196b376357452b391af0b9e16db2f09e7d688b18d7d7dc90f04006ea1205375 not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.096405 4813 scope.go:117] "RemoveContainer" containerID="42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.111919 4813 scope.go:117] "RemoveContainer" containerID="b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.153289 4813 scope.go:117] "RemoveContainer" containerID="5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.167386 4813 scope.go:117] "RemoveContainer" containerID="42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.168227 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4\": container with ID starting with 42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4 not found: ID does not exist" containerID="42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.168265 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4"} err="failed to get container status \"42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4\": rpc error: code = NotFound desc = could not find container \"42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4\": container with ID starting with 42a23e0358793d60cbbc8ddff86e06c97a7e3982e2eb9d3fdabc73c05312ddf4 not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.168308 4813 scope.go:117] "RemoveContainer" containerID="b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.168736 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b\": container with ID starting with b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b not found: ID does not exist" containerID="b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.168772 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b"} err="failed to get container status \"b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b\": rpc error: code = NotFound desc = could not find container \"b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b\": container with ID starting with b5de830085d9576eb1612ee283e614d61bdc3b05b1210dc43d45ac1dc154b88b not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.168799 4813 
scope.go:117] "RemoveContainer" containerID="5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c" Mar 20 15:45:32 crc kubenswrapper[4813]: E0320 15:45:32.169397 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c\": container with ID starting with 5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c not found: ID does not exist" containerID="5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.169421 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c"} err="failed to get container status \"5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c\": rpc error: code = NotFound desc = could not find container \"5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c\": container with ID starting with 5c20334094f05cf94a3e36e18c09971cd5d549b8ce731b799630881ba829546c not found: ID does not exist" Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.874160 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" event={"ID":"d700a123-9a6b-4d44-89b2-73e09fe026b3","Type":"ContainerStarted","Data":"5cc6a5fe4c544345435d59de6f0e5950ed17cb456fa4be0f35b4571143e27295"} Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.874522 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" event={"ID":"d700a123-9a6b-4d44-89b2-73e09fe026b3","Type":"ContainerStarted","Data":"5e61907994fc513f1e39f437261c016fe1dc5a49804f33f2aebcef4ece9489cd"} Mar 20 15:45:32 crc kubenswrapper[4813]: I0320 15:45:32.898652 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" podStartSLOduration=2.898631774 podStartE2EDuration="2.898631774s" podCreationTimestamp="2026-03-20 15:45:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:45:32.891355841 +0000 UTC m=+462.314058682" watchObservedRunningTime="2026-03-20 15:45:32.898631774 +0000 UTC m=+462.321334615" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.272811 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" path="/var/lib/kubelet/pods/3088bb81-3f95-4383-bbd5-ef89df01a20f/volumes" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.273445 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" path="/var/lib/kubelet/pods/6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f/volumes" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.274155 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" path="/var/lib/kubelet/pods/6f6910f6-780a-428c-a21b-f6702a912af1/volumes" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.275281 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" path="/var/lib/kubelet/pods/7c3f7704-b939-4e16-b4fb-1addffc3091d/volumes" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.276062 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" path="/var/lib/kubelet/pods/e0d612e1-08ef-4c5c-9e8c-f2636e9fa705/volumes" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391299 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-22jh6"] Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391782 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391799 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391819 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391828 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391847 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391855 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391870 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391880 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391898 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391906 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391918 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391927 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391947 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391955 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.391980 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.391987 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 
15:45:33.391998 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392006 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.392022 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392030 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.392042 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392057 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.392068 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392079 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.392098 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392107 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="extract-utilities" Mar 20 15:45:33 crc kubenswrapper[4813]: E0320 15:45:33.392124 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392133 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="extract-content" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392385 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392411 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3088bb81-3f95-4383-bbd5-ef89df01a20f" containerName="marketplace-operator" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392427 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c3f7704-b939-4e16-b4fb-1addffc3091d" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392445 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6121b8b4-c0d0-4b17-8f4e-8c4a3392ad3f" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392458 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d612e1-08ef-4c5c-9e8c-f2636e9fa705" containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.392472 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f6910f6-780a-428c-a21b-f6702a912af1" 
containerName="registry-server" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.402045 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.405988 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.422775 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-22jh6"] Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.446433 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d857b935-7153-4ddb-bec2-b55fff235cd4-catalog-content\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.446571 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d857b935-7153-4ddb-bec2-b55fff235cd4-utilities\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.446641 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v2sb\" (UniqueName: \"kubernetes.io/projected/d857b935-7153-4ddb-bec2-b55fff235cd4-kube-api-access-4v2sb\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.547361 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v2sb\" (UniqueName: \"kubernetes.io/projected/d857b935-7153-4ddb-bec2-b55fff235cd4-kube-api-access-4v2sb\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.547461 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d857b935-7153-4ddb-bec2-b55fff235cd4-catalog-content\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.547579 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d857b935-7153-4ddb-bec2-b55fff235cd4-utilities\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.548129 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d857b935-7153-4ddb-bec2-b55fff235cd4-utilities\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.549673 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/d857b935-7153-4ddb-bec2-b55fff235cd4-catalog-content\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.566713 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v2sb\" (UniqueName: \"kubernetes.io/projected/d857b935-7153-4ddb-bec2-b55fff235cd4-kube-api-access-4v2sb\") pod \"redhat-marketplace-22jh6\" (UID: \"d857b935-7153-4ddb-bec2-b55fff235cd4\") " pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.589536 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bfbr2"] Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.590744 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.593097 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.598118 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bfbr2"] Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.648419 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k7zj\" (UniqueName: \"kubernetes.io/projected/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-kube-api-access-7k7zj\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.648519 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-utilities\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.648542 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-catalog-content\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.733096 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.749521 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k7zj\" (UniqueName: \"kubernetes.io/projected/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-kube-api-access-7k7zj\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.749704 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-utilities\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.749732 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-catalog-content\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.750340 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-utilities\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.750458 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-catalog-content\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.767156 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k7zj\" (UniqueName: \"kubernetes.io/projected/e0d2a4c2-86dc-48d5-90e0-3fcc872046b6-kube-api-access-7k7zj\") pod \"certified-operators-bfbr2\" (UID: \"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6\") " pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.843087 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.843321 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.891337 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.894721 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/marketplace-operator-79b997595-qrqgm" Mar 20 15:45:33 crc kubenswrapper[4813]: I0320 15:45:33.936089 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.120064 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-22jh6"] Mar 20 15:45:34 crc kubenswrapper[4813]: W0320 15:45:34.124387 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd857b935_7153_4ddb_bec2_b55fff235cd4.slice/crio-75a122b37b85475495027ee2b220b14c6de8dbcf5246dac24046fdd6dd19ba71 WatchSource:0}: Error finding container 75a122b37b85475495027ee2b220b14c6de8dbcf5246dac24046fdd6dd19ba71: Status 404 returned error can't find the container with id 75a122b37b85475495027ee2b220b14c6de8dbcf5246dac24046fdd6dd19ba71 Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.315219 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bfbr2"] Mar 20 15:45:34 crc kubenswrapper[4813]: W0320 15:45:34.344572 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0d2a4c2_86dc_48d5_90e0_3fcc872046b6.slice/crio-43ea0d77a5458cf5444d750158ebca3d9a97d0ddc1990e5b31caac8358d340d9 WatchSource:0}: Error finding container 43ea0d77a5458cf5444d750158ebca3d9a97d0ddc1990e5b31caac8358d340d9: Status 404 returned error can't find the container with id 43ea0d77a5458cf5444d750158ebca3d9a97d0ddc1990e5b31caac8358d340d9 Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.897629 4813 generic.go:334] "Generic (PLEG): container finished" podID="e0d2a4c2-86dc-48d5-90e0-3fcc872046b6" containerID="209f6f93d4ed05bc96fd35a904410c92926021450fc18df2aaed9f09dc4092fc" exitCode=0 Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.897716 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfbr2" event={"ID":"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6","Type":"ContainerDied","Data":"209f6f93d4ed05bc96fd35a904410c92926021450fc18df2aaed9f09dc4092fc"} Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.897752 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfbr2" event={"ID":"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6","Type":"ContainerStarted","Data":"43ea0d77a5458cf5444d750158ebca3d9a97d0ddc1990e5b31caac8358d340d9"} Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.902728 4813 generic.go:334] "Generic (PLEG): container finished" podID="d857b935-7153-4ddb-bec2-b55fff235cd4" containerID="ed9974c26069b33ecd66d053a4261702dfe2457f2d56cd030089dd70acd46c8f" exitCode=0 Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.902921 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-22jh6" event={"ID":"d857b935-7153-4ddb-bec2-b55fff235cd4","Type":"ContainerDied","Data":"ed9974c26069b33ecd66d053a4261702dfe2457f2d56cd030089dd70acd46c8f"} Mar 20 15:45:34 crc kubenswrapper[4813]: I0320 15:45:34.902975 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-22jh6" event={"ID":"d857b935-7153-4ddb-bec2-b55fff235cd4","Type":"ContainerStarted","Data":"75a122b37b85475495027ee2b220b14c6de8dbcf5246dac24046fdd6dd19ba71"} Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.791036 4813 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vpvn4"] Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.792151 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.796421 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.803189 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpvn4"] Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.909339 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-22jh6" event={"ID":"d857b935-7153-4ddb-bec2-b55fff235cd4","Type":"ContainerStarted","Data":"21836ea052b496d575cd4734e96cbcefd8f54e4442b52e262acb319814025925"} Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.977998 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77c0d32a-5038-4f35-9faa-8e9cf832b213-catalog-content\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.978079 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77c0d32a-5038-4f35-9faa-8e9cf832b213-utilities\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.978907 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xml6n\" (UniqueName: \"kubernetes.io/projected/77c0d32a-5038-4f35-9faa-8e9cf832b213-kube-api-access-xml6n\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.987030 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bwvk2"] Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.988741 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.992426 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Mar 20 15:45:35 crc kubenswrapper[4813]: I0320 15:45:35.999597 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bwvk2"] Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.080806 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77c0d32a-5038-4f35-9faa-8e9cf832b213-utilities\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.081049 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xml6n\" (UniqueName: \"kubernetes.io/projected/77c0d32a-5038-4f35-9faa-8e9cf832b213-kube-api-access-xml6n\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.081191 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77c0d32a-5038-4f35-9faa-8e9cf832b213-catalog-content\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.081440 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77c0d32a-5038-4f35-9faa-8e9cf832b213-utilities\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.081678 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77c0d32a-5038-4f35-9faa-8e9cf832b213-catalog-content\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.109786 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xml6n\" (UniqueName: \"kubernetes.io/projected/77c0d32a-5038-4f35-9faa-8e9cf832b213-kube-api-access-xml6n\") pod \"redhat-operators-vpvn4\" (UID: \"77c0d32a-5038-4f35-9faa-8e9cf832b213\") " pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.131525 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.182172 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6jt\" (UniqueName: \"kubernetes.io/projected/72d4af38-f89a-48ae-bd87-96c7d479310e-kube-api-access-4h6jt\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.182377 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72d4af38-f89a-48ae-bd87-96c7d479310e-utilities\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.182424 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72d4af38-f89a-48ae-bd87-96c7d479310e-catalog-content\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.283431 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72d4af38-f89a-48ae-bd87-96c7d479310e-catalog-content\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.283546 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6jt\" (UniqueName: \"kubernetes.io/projected/72d4af38-f89a-48ae-bd87-96c7d479310e-kube-api-access-4h6jt\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.283595 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72d4af38-f89a-48ae-bd87-96c7d479310e-utilities\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.284091 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72d4af38-f89a-48ae-bd87-96c7d479310e-catalog-content\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.284159 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72d4af38-f89a-48ae-bd87-96c7d479310e-utilities\") pod \"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.301243 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6jt\" (UniqueName: \"kubernetes.io/projected/72d4af38-f89a-48ae-bd87-96c7d479310e-kube-api-access-4h6jt\") pod 
\"community-operators-bwvk2\" (UID: \"72d4af38-f89a-48ae-bd87-96c7d479310e\") " pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.312986 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:36 crc kubenswrapper[4813]: W0320 15:45:36.564425 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77c0d32a_5038_4f35_9faa_8e9cf832b213.slice/crio-318e84f824be67ac6c952d0c4fa97c27e8075f24578c43326b8c0526f7d62a96 WatchSource:0}: Error finding container 318e84f824be67ac6c952d0c4fa97c27e8075f24578c43326b8c0526f7d62a96: Status 404 returned error can't find the container with id 318e84f824be67ac6c952d0c4fa97c27e8075f24578c43326b8c0526f7d62a96 Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.564444 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpvn4"] Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.709025 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bwvk2"] Mar 20 15:45:36 crc kubenswrapper[4813]: W0320 15:45:36.719685 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72d4af38_f89a_48ae_bd87_96c7d479310e.slice/crio-edc7be19c953c52f201a17fc7609505bb54f54cc0787eac83ec5b6294be0da34 WatchSource:0}: Error finding container edc7be19c953c52f201a17fc7609505bb54f54cc0787eac83ec5b6294be0da34: Status 404 returned error can't find the container with id edc7be19c953c52f201a17fc7609505bb54f54cc0787eac83ec5b6294be0da34 Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.917665 4813 generic.go:334] "Generic (PLEG): container finished" podID="d857b935-7153-4ddb-bec2-b55fff235cd4" containerID="21836ea052b496d575cd4734e96cbcefd8f54e4442b52e262acb319814025925" exitCode=0 Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.917712 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-22jh6" event={"ID":"d857b935-7153-4ddb-bec2-b55fff235cd4","Type":"ContainerDied","Data":"21836ea052b496d575cd4734e96cbcefd8f54e4442b52e262acb319814025925"} Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.923904 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bwvk2" event={"ID":"72d4af38-f89a-48ae-bd87-96c7d479310e","Type":"ContainerStarted","Data":"1e958e45a6bbdccf402cf6264574c428b198aaa3bce7a7ec99635946b94d9b45"} Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.923988 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bwvk2" event={"ID":"72d4af38-f89a-48ae-bd87-96c7d479310e","Type":"ContainerStarted","Data":"edc7be19c953c52f201a17fc7609505bb54f54cc0787eac83ec5b6294be0da34"} Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.935252 4813 generic.go:334] "Generic (PLEG): container finished" podID="e0d2a4c2-86dc-48d5-90e0-3fcc872046b6" containerID="a51e523b3760efdf9581de7b0e158eb2a2043365f51dd81324877d090cc7fda0" exitCode=0 Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.935431 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfbr2" event={"ID":"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6","Type":"ContainerDied","Data":"a51e523b3760efdf9581de7b0e158eb2a2043365f51dd81324877d090cc7fda0"} Mar 
20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.937861 4813 generic.go:334] "Generic (PLEG): container finished" podID="77c0d32a-5038-4f35-9faa-8e9cf832b213" containerID="e06ec91195b4f67ebaa0ed42e6ec4fc8835ddc92e185319846dab1d40ae284c7" exitCode=0 Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.937925 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpvn4" event={"ID":"77c0d32a-5038-4f35-9faa-8e9cf832b213","Type":"ContainerDied","Data":"e06ec91195b4f67ebaa0ed42e6ec4fc8835ddc92e185319846dab1d40ae284c7"} Mar 20 15:45:36 crc kubenswrapper[4813]: I0320 15:45:36.937964 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpvn4" event={"ID":"77c0d32a-5038-4f35-9faa-8e9cf832b213","Type":"ContainerStarted","Data":"318e84f824be67ac6c952d0c4fa97c27e8075f24578c43326b8c0526f7d62a96"} Mar 20 15:45:37 crc kubenswrapper[4813]: I0320 15:45:37.946979 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-22jh6" event={"ID":"d857b935-7153-4ddb-bec2-b55fff235cd4","Type":"ContainerStarted","Data":"0bfe8e45adf61049abcc1cbd81761c95a576103a6f0bd4aa465c947c4973f333"} Mar 20 15:45:37 crc kubenswrapper[4813]: I0320 15:45:37.950823 4813 generic.go:334] "Generic (PLEG): container finished" podID="72d4af38-f89a-48ae-bd87-96c7d479310e" containerID="1e958e45a6bbdccf402cf6264574c428b198aaa3bce7a7ec99635946b94d9b45" exitCode=0 Mar 20 15:45:37 crc kubenswrapper[4813]: I0320 15:45:37.950845 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bwvk2" event={"ID":"72d4af38-f89a-48ae-bd87-96c7d479310e","Type":"ContainerDied","Data":"1e958e45a6bbdccf402cf6264574c428b198aaa3bce7a7ec99635946b94d9b45"} Mar 20 15:45:37 crc kubenswrapper[4813]: I0320 15:45:37.953318 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bfbr2" event={"ID":"e0d2a4c2-86dc-48d5-90e0-3fcc872046b6","Type":"ContainerStarted","Data":"28188132ca8371ac643eb8ce0883f0161235c0f9b6f7e43a40e51d8d66bed376"} Mar 20 15:45:37 crc kubenswrapper[4813]: I0320 15:45:37.967425 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-22jh6" podStartSLOduration=2.443529114 podStartE2EDuration="4.967404658s" podCreationTimestamp="2026-03-20 15:45:33 +0000 UTC" firstStartedPulling="2026-03-20 15:45:34.905073453 +0000 UTC m=+464.327776294" lastFinishedPulling="2026-03-20 15:45:37.428948997 +0000 UTC m=+466.851651838" observedRunningTime="2026-03-20 15:45:37.964558683 +0000 UTC m=+467.387261534" watchObservedRunningTime="2026-03-20 15:45:37.967404658 +0000 UTC m=+467.390107499" Mar 20 15:45:38 crc kubenswrapper[4813]: I0320 15:45:38.959329 4813 generic.go:334] "Generic (PLEG): container finished" podID="77c0d32a-5038-4f35-9faa-8e9cf832b213" containerID="0c8c2b43338f7eb197e77a25cb5d8a444ee31f7689a53e58164811860cbeabb0" exitCode=0 Mar 20 15:45:38 crc kubenswrapper[4813]: I0320 15:45:38.959439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpvn4" event={"ID":"77c0d32a-5038-4f35-9faa-8e9cf832b213","Type":"ContainerDied","Data":"0c8c2b43338f7eb197e77a25cb5d8a444ee31f7689a53e58164811860cbeabb0"} Mar 20 15:45:38 crc kubenswrapper[4813]: I0320 15:45:38.989800 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bfbr2" podStartSLOduration=3.413716145 
podStartE2EDuration="5.989771688s" podCreationTimestamp="2026-03-20 15:45:33 +0000 UTC" firstStartedPulling="2026-03-20 15:45:34.904336573 +0000 UTC m=+464.327039414" lastFinishedPulling="2026-03-20 15:45:37.480392086 +0000 UTC m=+466.903094957" observedRunningTime="2026-03-20 15:45:38.007917118 +0000 UTC m=+467.430619959" watchObservedRunningTime="2026-03-20 15:45:38.989771688 +0000 UTC m=+468.412474569" Mar 20 15:45:39 crc kubenswrapper[4813]: I0320 15:45:39.964770 4813 generic.go:334] "Generic (PLEG): container finished" podID="72d4af38-f89a-48ae-bd87-96c7d479310e" containerID="128725d824a01cd178964b11d3dce628f6e424aec16b49e8658b13145753b791" exitCode=0 Mar 20 15:45:39 crc kubenswrapper[4813]: I0320 15:45:39.965019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bwvk2" event={"ID":"72d4af38-f89a-48ae-bd87-96c7d479310e","Type":"ContainerDied","Data":"128725d824a01cd178964b11d3dce628f6e424aec16b49e8658b13145753b791"} Mar 20 15:45:40 crc kubenswrapper[4813]: I0320 15:45:40.972955 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpvn4" event={"ID":"77c0d32a-5038-4f35-9faa-8e9cf832b213","Type":"ContainerStarted","Data":"6b3ef7f5e4703e3bbb90a8583905121983c886488291db4d510cc0b2e55a5d8f"} Mar 20 15:45:41 crc kubenswrapper[4813]: I0320 15:45:41.982146 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bwvk2" event={"ID":"72d4af38-f89a-48ae-bd87-96c7d479310e","Type":"ContainerStarted","Data":"1a3e7e7a8b75240b01e2bb08fc526345d4441730c47eb4b489e05205f8709d0b"} Mar 20 15:45:42 crc kubenswrapper[4813]: I0320 15:45:42.004806 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bwvk2" podStartSLOduration=3.553293842 podStartE2EDuration="7.004788384s" podCreationTimestamp="2026-03-20 15:45:35 +0000 UTC" firstStartedPulling="2026-03-20 15:45:37.952215497 +0000 UTC m=+467.374918338" lastFinishedPulling="2026-03-20 15:45:41.403710039 +0000 UTC m=+470.826412880" observedRunningTime="2026-03-20 15:45:42.003267504 +0000 UTC m=+471.425970355" watchObservedRunningTime="2026-03-20 15:45:42.004788384 +0000 UTC m=+471.427491235" Mar 20 15:45:42 crc kubenswrapper[4813]: I0320 15:45:42.008026 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vpvn4" podStartSLOduration=4.035176247 podStartE2EDuration="7.008014889s" podCreationTimestamp="2026-03-20 15:45:35 +0000 UTC" firstStartedPulling="2026-03-20 15:45:36.939773439 +0000 UTC m=+466.362476280" lastFinishedPulling="2026-03-20 15:45:39.912612081 +0000 UTC m=+469.335314922" observedRunningTime="2026-03-20 15:45:40.997557323 +0000 UTC m=+470.420260174" watchObservedRunningTime="2026-03-20 15:45:42.008014889 +0000 UTC m=+471.430717730" Mar 20 15:45:43 crc kubenswrapper[4813]: I0320 15:45:43.733976 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:43 crc kubenswrapper[4813]: I0320 15:45:43.734689 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:43 crc kubenswrapper[4813]: I0320 15:45:43.780784 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:43 crc kubenswrapper[4813]: I0320 15:45:43.936609 4813 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:43 crc kubenswrapper[4813]: I0320 15:45:43.936661 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:44 crc kubenswrapper[4813]: I0320 15:45:44.009324 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:44 crc kubenswrapper[4813]: I0320 15:45:44.062697 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-22jh6" Mar 20 15:45:44 crc kubenswrapper[4813]: I0320 15:45:44.063912 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bfbr2" Mar 20 15:45:46 crc kubenswrapper[4813]: I0320 15:45:46.131672 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:46 crc kubenswrapper[4813]: I0320 15:45:46.133660 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:46 crc kubenswrapper[4813]: I0320 15:45:46.314070 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:46 crc kubenswrapper[4813]: I0320 15:45:46.314127 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:46 crc kubenswrapper[4813]: I0320 15:45:46.367299 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:47 crc kubenswrapper[4813]: I0320 15:45:47.053028 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bwvk2" Mar 20 15:45:47 crc kubenswrapper[4813]: I0320 15:45:47.186906 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vpvn4" podUID="77c0d32a-5038-4f35-9faa-8e9cf832b213" containerName="registry-server" probeResult="failure" output=< Mar 20 15:45:47 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Mar 20 15:45:47 crc kubenswrapper[4813]: > Mar 20 15:45:48 crc kubenswrapper[4813]: I0320 15:45:48.773987 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" podUID="17ddafd0-c19d-4c6a-a75f-70b85668c360" containerName="registry" containerID="cri-o://988b19ec97ba8ff2a40773fce8a0096a435eb5bc76a450907f9b98c48e012f1d" gracePeriod=30 Mar 20 15:45:49 crc kubenswrapper[4813]: I0320 15:45:49.019621 4813 generic.go:334] "Generic (PLEG): container finished" podID="17ddafd0-c19d-4c6a-a75f-70b85668c360" containerID="988b19ec97ba8ff2a40773fce8a0096a435eb5bc76a450907f9b98c48e012f1d" exitCode=0 Mar 20 15:45:49 crc kubenswrapper[4813]: I0320 15:45:49.019716 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" event={"ID":"17ddafd0-c19d-4c6a-a75f-70b85668c360","Type":"ContainerDied","Data":"988b19ec97ba8ff2a40773fce8a0096a435eb5bc76a450907f9b98c48e012f1d"} Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.775637 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.878847 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-bound-sa-token\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879089 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879135 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-tls\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879178 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/17ddafd0-c19d-4c6a-a75f-70b85668c360-installation-pull-secrets\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879258 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/17ddafd0-c19d-4c6a-a75f-70b85668c360-ca-trust-extracted\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879292 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp2sw\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-kube-api-access-vp2sw\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879368 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-certificates\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.879653 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-trusted-ca\") pod \"17ddafd0-c19d-4c6a-a75f-70b85668c360\" (UID: \"17ddafd0-c19d-4c6a-a75f-70b85668c360\") " Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.880345 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.880436 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.884862 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.885399 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ddafd0-c19d-4c6a-a75f-70b85668c360-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.885641 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.885664 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-kube-api-access-vp2sw" (OuterVolumeSpecName: "kube-api-access-vp2sw") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "kube-api-access-vp2sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.898914 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17ddafd0-c19d-4c6a-a75f-70b85668c360-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.900634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "17ddafd0-c19d-4c6a-a75f-70b85668c360" (UID: "17ddafd0-c19d-4c6a-a75f-70b85668c360"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981037 4813 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-tls\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981068 4813 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/17ddafd0-c19d-4c6a-a75f-70b85668c360-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981079 4813 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/17ddafd0-c19d-4c6a-a75f-70b85668c360-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981088 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp2sw\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-kube-api-access-vp2sw\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981097 4813 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-registry-certificates\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981104 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/17ddafd0-c19d-4c6a-a75f-70b85668c360-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:49.981111 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/17ddafd0-c19d-4c6a-a75f-70b85668c360-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:50.026470 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" event={"ID":"17ddafd0-c19d-4c6a-a75f-70b85668c360","Type":"ContainerDied","Data":"5eada1b239333935c858557a45eb87955f1fa2845dc50151142030c3ff1a9cfe"} Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:50.026561 4813 scope.go:117] "RemoveContainer" containerID="988b19ec97ba8ff2a40773fce8a0096a435eb5bc76a450907f9b98c48e012f1d" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:50.026624 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b4swq" Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:50.067207 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b4swq"] Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:50.067256 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b4swq"] Mar 20 15:45:52 crc kubenswrapper[4813]: I0320 15:45:51.273585 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17ddafd0-c19d-4c6a-a75f-70b85668c360" path="/var/lib/kubelet/pods/17ddafd0-c19d-4c6a-a75f-70b85668c360/volumes" Mar 20 15:45:56 crc kubenswrapper[4813]: I0320 15:45:56.176678 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:45:56 crc kubenswrapper[4813]: I0320 15:45:56.217655 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vpvn4" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.145312 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567026-dfn9m"] Mar 20 15:46:00 crc kubenswrapper[4813]: E0320 15:46:00.145741 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17ddafd0-c19d-4c6a-a75f-70b85668c360" containerName="registry" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.145770 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="17ddafd0-c19d-4c6a-a75f-70b85668c360" containerName="registry" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.146111 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="17ddafd0-c19d-4c6a-a75f-70b85668c360" containerName="registry" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.146846 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.151965 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567026-dfn9m"] Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.152676 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.153099 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.153341 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.216380 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm6l9\" (UniqueName: \"kubernetes.io/projected/121959d8-8639-4a5e-a125-12ba6250f660-kube-api-access-xm6l9\") pod \"auto-csr-approver-29567026-dfn9m\" (UID: \"121959d8-8639-4a5e-a125-12ba6250f660\") " pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.318111 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm6l9\" (UniqueName: \"kubernetes.io/projected/121959d8-8639-4a5e-a125-12ba6250f660-kube-api-access-xm6l9\") pod \"auto-csr-approver-29567026-dfn9m\" (UID: \"121959d8-8639-4a5e-a125-12ba6250f660\") " pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.344450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm6l9\" (UniqueName: \"kubernetes.io/projected/121959d8-8639-4a5e-a125-12ba6250f660-kube-api-access-xm6l9\") pod \"auto-csr-approver-29567026-dfn9m\" (UID: \"121959d8-8639-4a5e-a125-12ba6250f660\") " pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.478523 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:00 crc kubenswrapper[4813]: I0320 15:46:00.937770 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567026-dfn9m"] Mar 20 15:46:01 crc kubenswrapper[4813]: I0320 15:46:01.199428 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" event={"ID":"121959d8-8639-4a5e-a125-12ba6250f660","Type":"ContainerStarted","Data":"b02d3169c29bc705d6475c5323fccb50bb10245f937735a8f1cba46eaa821e14"} Mar 20 15:46:03 crc kubenswrapper[4813]: I0320 15:46:03.212040 4813 generic.go:334] "Generic (PLEG): container finished" podID="121959d8-8639-4a5e-a125-12ba6250f660" containerID="8fb5339cd161cd993172cd34707d2216a2e6c6acb6b08921536e79fbf6000c1c" exitCode=0 Mar 20 15:46:03 crc kubenswrapper[4813]: I0320 15:46:03.212087 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" event={"ID":"121959d8-8639-4a5e-a125-12ba6250f660","Type":"ContainerDied","Data":"8fb5339cd161cd993172cd34707d2216a2e6c6acb6b08921536e79fbf6000c1c"} Mar 20 15:46:03 crc kubenswrapper[4813]: I0320 15:46:03.842335 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:46:03 crc kubenswrapper[4813]: I0320 15:46:03.842721 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:46:04 crc kubenswrapper[4813]: I0320 15:46:04.727962 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:04 crc kubenswrapper[4813]: I0320 15:46:04.895905 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm6l9\" (UniqueName: \"kubernetes.io/projected/121959d8-8639-4a5e-a125-12ba6250f660-kube-api-access-xm6l9\") pod \"121959d8-8639-4a5e-a125-12ba6250f660\" (UID: \"121959d8-8639-4a5e-a125-12ba6250f660\") " Mar 20 15:46:04 crc kubenswrapper[4813]: I0320 15:46:04.900536 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/121959d8-8639-4a5e-a125-12ba6250f660-kube-api-access-xm6l9" (OuterVolumeSpecName: "kube-api-access-xm6l9") pod "121959d8-8639-4a5e-a125-12ba6250f660" (UID: "121959d8-8639-4a5e-a125-12ba6250f660"). InnerVolumeSpecName "kube-api-access-xm6l9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:46:04 crc kubenswrapper[4813]: I0320 15:46:04.997309 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm6l9\" (UniqueName: \"kubernetes.io/projected/121959d8-8639-4a5e-a125-12ba6250f660-kube-api-access-xm6l9\") on node \"crc\" DevicePath \"\"" Mar 20 15:46:05 crc kubenswrapper[4813]: I0320 15:46:05.229606 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" event={"ID":"121959d8-8639-4a5e-a125-12ba6250f660","Type":"ContainerDied","Data":"b02d3169c29bc705d6475c5323fccb50bb10245f937735a8f1cba46eaa821e14"} Mar 20 15:46:05 crc kubenswrapper[4813]: I0320 15:46:05.229648 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b02d3169c29bc705d6475c5323fccb50bb10245f937735a8f1cba46eaa821e14" Mar 20 15:46:05 crc kubenswrapper[4813]: I0320 15:46:05.229693 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567026-dfn9m" Mar 20 15:46:05 crc kubenswrapper[4813]: I0320 15:46:05.792310 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567020-4l7qk"] Mar 20 15:46:05 crc kubenswrapper[4813]: I0320 15:46:05.799007 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567020-4l7qk"] Mar 20 15:46:07 crc kubenswrapper[4813]: I0320 15:46:07.277772 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4d83813-6127-4a79-ad93-bd5cafe64abd" path="/var/lib/kubelet/pods/e4d83813-6127-4a79-ad93-bd5cafe64abd/volumes" Mar 20 15:46:33 crc kubenswrapper[4813]: I0320 15:46:33.842881 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:46:33 crc kubenswrapper[4813]: I0320 15:46:33.843692 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:46:33 crc kubenswrapper[4813]: I0320 15:46:33.843771 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:46:33 crc kubenswrapper[4813]: I0320 15:46:33.844726 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"93237d0a8b83a96070f53085d50fd6e82323eb4f93a8d55d7840bf40381d06aa"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 15:46:33 crc kubenswrapper[4813]: I0320 15:46:33.844797 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://93237d0a8b83a96070f53085d50fd6e82323eb4f93a8d55d7840bf40381d06aa" gracePeriod=600 Mar 20 15:46:34 crc kubenswrapper[4813]: I0320 15:46:34.397652 4813 generic.go:334] "Generic 
(PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="93237d0a8b83a96070f53085d50fd6e82323eb4f93a8d55d7840bf40381d06aa" exitCode=0 Mar 20 15:46:34 crc kubenswrapper[4813]: I0320 15:46:34.397728 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"93237d0a8b83a96070f53085d50fd6e82323eb4f93a8d55d7840bf40381d06aa"} Mar 20 15:46:34 crc kubenswrapper[4813]: I0320 15:46:34.397933 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"73a01997359872a7d6c06593a97cbca955cca2ef12575691de07e76008cef282"} Mar 20 15:46:34 crc kubenswrapper[4813]: I0320 15:46:34.397954 4813 scope.go:117] "RemoveContainer" containerID="61246152c29fc6c34b9ff30eaee435366d318e640cc78f5f2faf1535927f45e2" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.148002 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567028-m4wlb"] Mar 20 15:48:00 crc kubenswrapper[4813]: E0320 15:48:00.149182 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="121959d8-8639-4a5e-a125-12ba6250f660" containerName="oc" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.149207 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="121959d8-8639-4a5e-a125-12ba6250f660" containerName="oc" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.149407 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="121959d8-8639-4a5e-a125-12ba6250f660" containerName="oc" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.150274 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.153169 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.155664 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.156101 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.156406 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567028-m4wlb"] Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.165659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbxjk\" (UniqueName: \"kubernetes.io/projected/43e20dfc-363f-43d2-9f6b-86ba5209e70b-kube-api-access-nbxjk\") pod \"auto-csr-approver-29567028-m4wlb\" (UID: \"43e20dfc-363f-43d2-9f6b-86ba5209e70b\") " pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.267594 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbxjk\" (UniqueName: \"kubernetes.io/projected/43e20dfc-363f-43d2-9f6b-86ba5209e70b-kube-api-access-nbxjk\") pod \"auto-csr-approver-29567028-m4wlb\" (UID: \"43e20dfc-363f-43d2-9f6b-86ba5209e70b\") " pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.291048 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbxjk\" (UniqueName: \"kubernetes.io/projected/43e20dfc-363f-43d2-9f6b-86ba5209e70b-kube-api-access-nbxjk\") pod \"auto-csr-approver-29567028-m4wlb\" (UID: \"43e20dfc-363f-43d2-9f6b-86ba5209e70b\") " pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.474889 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.704265 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567028-m4wlb"] Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.710004 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 15:48:00 crc kubenswrapper[4813]: I0320 15:48:00.952423 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" event={"ID":"43e20dfc-363f-43d2-9f6b-86ba5209e70b","Type":"ContainerStarted","Data":"1a0e198e06eb8c169ef2613f94829068e3edd952851f104f9479c34301223de9"} Mar 20 15:48:02 crc kubenswrapper[4813]: I0320 15:48:02.970751 4813 generic.go:334] "Generic (PLEG): container finished" podID="43e20dfc-363f-43d2-9f6b-86ba5209e70b" containerID="5728b1c29d28538b8c53ebdf94db21b86e32012c9f9955d5384578134aa71543" exitCode=0 Mar 20 15:48:02 crc kubenswrapper[4813]: I0320 15:48:02.970793 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" event={"ID":"43e20dfc-363f-43d2-9f6b-86ba5209e70b","Type":"ContainerDied","Data":"5728b1c29d28538b8c53ebdf94db21b86e32012c9f9955d5384578134aa71543"} Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.190516 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.324843 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbxjk\" (UniqueName: \"kubernetes.io/projected/43e20dfc-363f-43d2-9f6b-86ba5209e70b-kube-api-access-nbxjk\") pod \"43e20dfc-363f-43d2-9f6b-86ba5209e70b\" (UID: \"43e20dfc-363f-43d2-9f6b-86ba5209e70b\") " Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.332594 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43e20dfc-363f-43d2-9f6b-86ba5209e70b-kube-api-access-nbxjk" (OuterVolumeSpecName: "kube-api-access-nbxjk") pod "43e20dfc-363f-43d2-9f6b-86ba5209e70b" (UID: "43e20dfc-363f-43d2-9f6b-86ba5209e70b"). InnerVolumeSpecName "kube-api-access-nbxjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.426314 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbxjk\" (UniqueName: \"kubernetes.io/projected/43e20dfc-363f-43d2-9f6b-86ba5209e70b-kube-api-access-nbxjk\") on node \"crc\" DevicePath \"\"" Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.994420 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" event={"ID":"43e20dfc-363f-43d2-9f6b-86ba5209e70b","Type":"ContainerDied","Data":"1a0e198e06eb8c169ef2613f94829068e3edd952851f104f9479c34301223de9"} Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.994453 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a0e198e06eb8c169ef2613f94829068e3edd952851f104f9479c34301223de9" Mar 20 15:48:04 crc kubenswrapper[4813]: I0320 15:48:04.994554 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567028-m4wlb" Mar 20 15:48:05 crc kubenswrapper[4813]: I0320 15:48:05.277842 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567022-zfcwm"] Mar 20 15:48:05 crc kubenswrapper[4813]: I0320 15:48:05.277903 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567022-zfcwm"] Mar 20 15:48:07 crc kubenswrapper[4813]: I0320 15:48:07.273576 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9138f2f5-c58b-4256-88e6-d3c52d034cf4" path="/var/lib/kubelet/pods/9138f2f5-c58b-4256-88e6-d3c52d034cf4/volumes" Mar 20 15:49:03 crc kubenswrapper[4813]: I0320 15:49:03.842585 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:49:03 crc kubenswrapper[4813]: I0320 15:49:03.843244 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:49:05 crc kubenswrapper[4813]: I0320 15:49:05.874419 4813 scope.go:117] "RemoveContainer" containerID="8470145f0a3dae96a1d31d937e1c90a87d8dcc8e1bc0eb7995dc2bae0799a2a0" Mar 20 15:49:05 crc kubenswrapper[4813]: I0320 15:49:05.902511 4813 scope.go:117] "RemoveContainer" containerID="1699ba9b45404496c4aa0835d5b254dda6eb44875089606d85807447862d0bde" Mar 20 15:49:33 crc kubenswrapper[4813]: I0320 15:49:33.843631 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:49:33 crc kubenswrapper[4813]: I0320 15:49:33.844514 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.144036 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567030-mrdjh"] Mar 20 15:50:00 crc kubenswrapper[4813]: E0320 15:50:00.145131 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43e20dfc-363f-43d2-9f6b-86ba5209e70b" containerName="oc" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.145152 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="43e20dfc-363f-43d2-9f6b-86ba5209e70b" containerName="oc" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.145414 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="43e20dfc-363f-43d2-9f6b-86ba5209e70b" containerName="oc" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.145988 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.148652 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.148867 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.149039 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.160402 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567030-mrdjh"] Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.333895 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gcrm\" (UniqueName: \"kubernetes.io/projected/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0-kube-api-access-5gcrm\") pod \"auto-csr-approver-29567030-mrdjh\" (UID: \"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0\") " pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.435679 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gcrm\" (UniqueName: \"kubernetes.io/projected/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0-kube-api-access-5gcrm\") pod \"auto-csr-approver-29567030-mrdjh\" (UID: \"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0\") " pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.460572 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gcrm\" (UniqueName: \"kubernetes.io/projected/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0-kube-api-access-5gcrm\") pod \"auto-csr-approver-29567030-mrdjh\" (UID: \"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0\") " pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.476071 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.704625 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567030-mrdjh"] Mar 20 15:50:00 crc kubenswrapper[4813]: W0320 15:50:00.711391 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e55fc1c_85ca_43da_9c6a_e774ad82bdb0.slice/crio-6969c38a637ae9cf19fec61f903d998a9745427ec57e3330dcfd78447e3a6a69 WatchSource:0}: Error finding container 6969c38a637ae9cf19fec61f903d998a9745427ec57e3330dcfd78447e3a6a69: Status 404 returned error can't find the container with id 6969c38a637ae9cf19fec61f903d998a9745427ec57e3330dcfd78447e3a6a69 Mar 20 15:50:00 crc kubenswrapper[4813]: I0320 15:50:00.724878 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" event={"ID":"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0","Type":"ContainerStarted","Data":"6969c38a637ae9cf19fec61f903d998a9745427ec57e3330dcfd78447e3a6a69"} Mar 20 15:50:02 crc kubenswrapper[4813]: I0320 15:50:02.739663 4813 generic.go:334] "Generic (PLEG): container finished" podID="1e55fc1c-85ca-43da-9c6a-e774ad82bdb0" containerID="8ed9e6eb27f590d7f19fd05f14df351407957a09bb413dc5a49fb0678e8f9ec9" exitCode=0 Mar 20 15:50:02 crc kubenswrapper[4813]: I0320 15:50:02.739747 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" event={"ID":"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0","Type":"ContainerDied","Data":"8ed9e6eb27f590d7f19fd05f14df351407957a09bb413dc5a49fb0678e8f9ec9"} Mar 20 15:50:03 crc kubenswrapper[4813]: I0320 15:50:03.842194 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:50:03 crc kubenswrapper[4813]: I0320 15:50:03.842266 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:50:03 crc kubenswrapper[4813]: I0320 15:50:03.842313 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:50:03 crc kubenswrapper[4813]: I0320 15:50:03.842972 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"73a01997359872a7d6c06593a97cbca955cca2ef12575691de07e76008cef282"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 15:50:03 crc kubenswrapper[4813]: I0320 15:50:03.843028 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://73a01997359872a7d6c06593a97cbca955cca2ef12575691de07e76008cef282" gracePeriod=600 Mar 20 15:50:03 crc kubenswrapper[4813]: I0320 15:50:03.998255 4813 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.089230 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gcrm\" (UniqueName: \"kubernetes.io/projected/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0-kube-api-access-5gcrm\") pod \"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0\" (UID: \"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0\") " Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.094593 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0-kube-api-access-5gcrm" (OuterVolumeSpecName: "kube-api-access-5gcrm") pod "1e55fc1c-85ca-43da-9c6a-e774ad82bdb0" (UID: "1e55fc1c-85ca-43da-9c6a-e774ad82bdb0"). InnerVolumeSpecName "kube-api-access-5gcrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.190976 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gcrm\" (UniqueName: \"kubernetes.io/projected/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0-kube-api-access-5gcrm\") on node \"crc\" DevicePath \"\"" Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.757749 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="73a01997359872a7d6c06593a97cbca955cca2ef12575691de07e76008cef282" exitCode=0 Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.757820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"73a01997359872a7d6c06593a97cbca955cca2ef12575691de07e76008cef282"} Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.758122 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"c4e8d3a270bd3cde9b5259a1a03876e8e2c58d54ad599c8e659fb3e3d9e094f9"} Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.758142 4813 scope.go:117] "RemoveContainer" containerID="93237d0a8b83a96070f53085d50fd6e82323eb4f93a8d55d7840bf40381d06aa" Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.760410 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" event={"ID":"1e55fc1c-85ca-43da-9c6a-e774ad82bdb0","Type":"ContainerDied","Data":"6969c38a637ae9cf19fec61f903d998a9745427ec57e3330dcfd78447e3a6a69"} Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.760427 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6969c38a637ae9cf19fec61f903d998a9745427ec57e3330dcfd78447e3a6a69" Mar 20 15:50:04 crc kubenswrapper[4813]: I0320 15:50:04.760462 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567030-mrdjh" Mar 20 15:50:05 crc kubenswrapper[4813]: I0320 15:50:05.068758 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567024-hvhw8"] Mar 20 15:50:05 crc kubenswrapper[4813]: I0320 15:50:05.076088 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567024-hvhw8"] Mar 20 15:50:05 crc kubenswrapper[4813]: I0320 15:50:05.277992 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="992ec145-8ff5-49a3-aa0f-6bb554b83c80" path="/var/lib/kubelet/pods/992ec145-8ff5-49a3-aa0f-6bb554b83c80/volumes" Mar 20 15:50:05 crc kubenswrapper[4813]: I0320 15:50:05.966263 4813 scope.go:117] "RemoveContainer" containerID="543fc3c3421acd945590cc546ed1fc59833d02a7f8dd38550b774288e9f8e904" Mar 20 15:51:49 crc kubenswrapper[4813]: I0320 15:51:49.397621 4813 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.151016 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567032-rhfbl"] Mar 20 15:52:00 crc kubenswrapper[4813]: E0320 15:52:00.151981 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e55fc1c-85ca-43da-9c6a-e774ad82bdb0" containerName="oc" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.152003 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e55fc1c-85ca-43da-9c6a-e774ad82bdb0" containerName="oc" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.152158 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e55fc1c-85ca-43da-9c6a-e774ad82bdb0" containerName="oc" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.152786 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.156815 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.156976 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.157159 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.165829 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567032-rhfbl"] Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.276769 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv5wk\" (UniqueName: \"kubernetes.io/projected/258cc7b6-bd0b-4452-911e-7b8091b7c9f0-kube-api-access-xv5wk\") pod \"auto-csr-approver-29567032-rhfbl\" (UID: \"258cc7b6-bd0b-4452-911e-7b8091b7c9f0\") " pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.378121 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv5wk\" (UniqueName: \"kubernetes.io/projected/258cc7b6-bd0b-4452-911e-7b8091b7c9f0-kube-api-access-xv5wk\") pod \"auto-csr-approver-29567032-rhfbl\" (UID: \"258cc7b6-bd0b-4452-911e-7b8091b7c9f0\") " pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.403943 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv5wk\" (UniqueName: \"kubernetes.io/projected/258cc7b6-bd0b-4452-911e-7b8091b7c9f0-kube-api-access-xv5wk\") pod \"auto-csr-approver-29567032-rhfbl\" (UID: \"258cc7b6-bd0b-4452-911e-7b8091b7c9f0\") " pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.475801 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.703748 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567032-rhfbl"] Mar 20 15:52:00 crc kubenswrapper[4813]: I0320 15:52:00.839345 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" event={"ID":"258cc7b6-bd0b-4452-911e-7b8091b7c9f0","Type":"ContainerStarted","Data":"fe0d72dc443a21bd6802aad0960d9629e4deca55fd60a1339584ea2acc5b28e7"} Mar 20 15:52:02 crc kubenswrapper[4813]: I0320 15:52:02.856988 4813 generic.go:334] "Generic (PLEG): container finished" podID="258cc7b6-bd0b-4452-911e-7b8091b7c9f0" containerID="b2b727a421c9f3999dd473266cdad1a4537e7af623db0ba1517fb3c71d854c3f" exitCode=0 Mar 20 15:52:02 crc kubenswrapper[4813]: I0320 15:52:02.857120 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" event={"ID":"258cc7b6-bd0b-4452-911e-7b8091b7c9f0","Type":"ContainerDied","Data":"b2b727a421c9f3999dd473266cdad1a4537e7af623db0ba1517fb3c71d854c3f"} Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.157838 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.331197 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xv5wk\" (UniqueName: \"kubernetes.io/projected/258cc7b6-bd0b-4452-911e-7b8091b7c9f0-kube-api-access-xv5wk\") pod \"258cc7b6-bd0b-4452-911e-7b8091b7c9f0\" (UID: \"258cc7b6-bd0b-4452-911e-7b8091b7c9f0\") " Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.337845 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/258cc7b6-bd0b-4452-911e-7b8091b7c9f0-kube-api-access-xv5wk" (OuterVolumeSpecName: "kube-api-access-xv5wk") pod "258cc7b6-bd0b-4452-911e-7b8091b7c9f0" (UID: "258cc7b6-bd0b-4452-911e-7b8091b7c9f0"). InnerVolumeSpecName "kube-api-access-xv5wk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.433176 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xv5wk\" (UniqueName: \"kubernetes.io/projected/258cc7b6-bd0b-4452-911e-7b8091b7c9f0-kube-api-access-xv5wk\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.872134 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" event={"ID":"258cc7b6-bd0b-4452-911e-7b8091b7c9f0","Type":"ContainerDied","Data":"fe0d72dc443a21bd6802aad0960d9629e4deca55fd60a1339584ea2acc5b28e7"} Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.872181 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe0d72dc443a21bd6802aad0960d9629e4deca55fd60a1339584ea2acc5b28e7" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.872210 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567032-rhfbl" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.897226 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6mgc7"] Mar 20 15:52:04 crc kubenswrapper[4813]: E0320 15:52:04.897502 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="258cc7b6-bd0b-4452-911e-7b8091b7c9f0" containerName="oc" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.897522 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="258cc7b6-bd0b-4452-911e-7b8091b7c9f0" containerName="oc" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.897651 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="258cc7b6-bd0b-4452-911e-7b8091b7c9f0" containerName="oc" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.898502 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:04 crc kubenswrapper[4813]: I0320 15:52:04.908866 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mgc7"] Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.041824 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-catalog-content\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.042264 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-utilities\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.042322 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swqfk\" (UniqueName: \"kubernetes.io/projected/c246c801-2892-43ac-b605-0d46f649e294-kube-api-access-swqfk\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.143346 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-utilities\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.143435 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swqfk\" (UniqueName: \"kubernetes.io/projected/c246c801-2892-43ac-b605-0d46f649e294-kube-api-access-swqfk\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.143468 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-catalog-content\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.143968 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-utilities\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.144003 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-catalog-content\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.167220 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-swqfk\" (UniqueName: \"kubernetes.io/projected/c246c801-2892-43ac-b605-0d46f649e294-kube-api-access-swqfk\") pod \"redhat-marketplace-6mgc7\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.210654 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567026-dfn9m"] Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.213724 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.216112 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567026-dfn9m"] Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.271777 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="121959d8-8639-4a5e-a125-12ba6250f660" path="/var/lib/kubelet/pods/121959d8-8639-4a5e-a125-12ba6250f660/volumes" Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.471558 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mgc7"] Mar 20 15:52:05 crc kubenswrapper[4813]: W0320 15:52:05.472997 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc246c801_2892_43ac_b605_0d46f649e294.slice/crio-6dbda2af54926860b4b2a11ad6939146fd312fee49f9023e2c57190b9a32dff5 WatchSource:0}: Error finding container 6dbda2af54926860b4b2a11ad6939146fd312fee49f9023e2c57190b9a32dff5: Status 404 returned error can't find the container with id 6dbda2af54926860b4b2a11ad6939146fd312fee49f9023e2c57190b9a32dff5 Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.881746 4813 generic.go:334] "Generic (PLEG): container finished" podID="c246c801-2892-43ac-b605-0d46f649e294" containerID="a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b" exitCode=0 Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.881798 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mgc7" event={"ID":"c246c801-2892-43ac-b605-0d46f649e294","Type":"ContainerDied","Data":"a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b"} Mar 20 15:52:05 crc kubenswrapper[4813]: I0320 15:52:05.881831 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mgc7" event={"ID":"c246c801-2892-43ac-b605-0d46f649e294","Type":"ContainerStarted","Data":"6dbda2af54926860b4b2a11ad6939146fd312fee49f9023e2c57190b9a32dff5"} Mar 20 15:52:06 crc kubenswrapper[4813]: I0320 15:52:06.038855 4813 scope.go:117] "RemoveContainer" containerID="8fb5339cd161cd993172cd34707d2216a2e6c6acb6b08921536e79fbf6000c1c" Mar 20 15:52:07 crc kubenswrapper[4813]: I0320 15:52:07.902608 4813 generic.go:334] "Generic (PLEG): container finished" podID="c246c801-2892-43ac-b605-0d46f649e294" containerID="d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513" exitCode=0 Mar 20 15:52:07 crc kubenswrapper[4813]: I0320 15:52:07.902680 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mgc7" event={"ID":"c246c801-2892-43ac-b605-0d46f649e294","Type":"ContainerDied","Data":"d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513"} Mar 20 15:52:08 crc kubenswrapper[4813]: I0320 15:52:08.911423 4813 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-marketplace-6mgc7" event={"ID":"c246c801-2892-43ac-b605-0d46f649e294","Type":"ContainerStarted","Data":"8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713"} Mar 20 15:52:08 crc kubenswrapper[4813]: I0320 15:52:08.932471 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6mgc7" podStartSLOduration=2.5350398419999998 podStartE2EDuration="4.932453958s" podCreationTimestamp="2026-03-20 15:52:04 +0000 UTC" firstStartedPulling="2026-03-20 15:52:05.883795109 +0000 UTC m=+855.306497950" lastFinishedPulling="2026-03-20 15:52:08.281209225 +0000 UTC m=+857.703912066" observedRunningTime="2026-03-20 15:52:08.928926742 +0000 UTC m=+858.351629573" watchObservedRunningTime="2026-03-20 15:52:08.932453958 +0000 UTC m=+858.355156799" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.130219 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m"] Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.134132 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.142591 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m"] Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.177395 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.277587 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxxhd\" (UniqueName: \"kubernetes.io/projected/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-kube-api-access-qxxhd\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.277676 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-util\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.277731 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-bundle\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.379057 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxxhd\" (UniqueName: \"kubernetes.io/projected/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-kube-api-access-qxxhd\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " 
pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.379142 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-util\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.379208 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-bundle\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.380336 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-util\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.381143 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-bundle\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.406984 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxxhd\" (UniqueName: \"kubernetes.io/projected/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-kube-api-access-qxxhd\") pod \"93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.490930 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.758753 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m"] Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.947213 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" event={"ID":"ee83deb1-a281-4eb3-9ba2-7212ae713ae6","Type":"ContainerStarted","Data":"73f19a4ec4cad08bd1cba42eb2ab20261957b5656ae840dfb81fb9aaeacc72b6"} Mar 20 15:52:14 crc kubenswrapper[4813]: I0320 15:52:14.947543 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" event={"ID":"ee83deb1-a281-4eb3-9ba2-7212ae713ae6","Type":"ContainerStarted","Data":"3428b7901766dc7f55fdacd47b251d3ded30f8ea24920bf5556204e9c2e9fa69"} Mar 20 15:52:15 crc kubenswrapper[4813]: I0320 15:52:15.214051 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:15 crc kubenswrapper[4813]: I0320 15:52:15.214133 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:15 crc kubenswrapper[4813]: I0320 15:52:15.275742 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:15 crc kubenswrapper[4813]: I0320 15:52:15.957235 4813 generic.go:334] "Generic (PLEG): container finished" podID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerID="73f19a4ec4cad08bd1cba42eb2ab20261957b5656ae840dfb81fb9aaeacc72b6" exitCode=0 Mar 20 15:52:15 crc kubenswrapper[4813]: I0320 15:52:15.957281 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" event={"ID":"ee83deb1-a281-4eb3-9ba2-7212ae713ae6","Type":"ContainerDied","Data":"73f19a4ec4cad08bd1cba42eb2ab20261957b5656ae840dfb81fb9aaeacc72b6"} Mar 20 15:52:16 crc kubenswrapper[4813]: I0320 15:52:16.015350 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:16 crc kubenswrapper[4813]: I0320 15:52:16.875884 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2krpb"] Mar 20 15:52:16 crc kubenswrapper[4813]: I0320 15:52:16.878879 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:16 crc kubenswrapper[4813]: I0320 15:52:16.886285 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2krpb"] Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.017308 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-utilities\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.017614 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvn9c\" (UniqueName: \"kubernetes.io/projected/dab66f52-4455-43e4-a3de-65d2611686d6-kube-api-access-vvn9c\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.017806 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-catalog-content\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.119247 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-utilities\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.119300 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvn9c\" (UniqueName: \"kubernetes.io/projected/dab66f52-4455-43e4-a3de-65d2611686d6-kube-api-access-vvn9c\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.119327 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-catalog-content\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.119779 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-catalog-content\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.119877 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-utilities\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.144758 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vvn9c\" (UniqueName: \"kubernetes.io/projected/dab66f52-4455-43e4-a3de-65d2611686d6-kube-api-access-vvn9c\") pod \"redhat-operators-2krpb\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.208925 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.616562 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2krpb"] Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.968181 4813 generic.go:334] "Generic (PLEG): container finished" podID="dab66f52-4455-43e4-a3de-65d2611686d6" containerID="78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb" exitCode=0 Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.968227 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerDied","Data":"78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb"} Mar 20 15:52:17 crc kubenswrapper[4813]: I0320 15:52:17.968280 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerStarted","Data":"820cf214a4e45ef987f941e7ed74fa81e8b6515ef459bfccfc31324ba93a7ac9"} Mar 20 15:52:18 crc kubenswrapper[4813]: I0320 15:52:18.857303 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mgc7"] Mar 20 15:52:18 crc kubenswrapper[4813]: I0320 15:52:18.857765 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6mgc7" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="registry-server" containerID="cri-o://8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713" gracePeriod=2 Mar 20 15:52:18 crc kubenswrapper[4813]: I0320 15:52:18.975694 4813 generic.go:334] "Generic (PLEG): container finished" podID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerID="c93d1cb1a341233feb2541eadda45752e9548fe7d8e07f83ec846855f4a92847" exitCode=0 Mar 20 15:52:18 crc kubenswrapper[4813]: I0320 15:52:18.975760 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" event={"ID":"ee83deb1-a281-4eb3-9ba2-7212ae713ae6","Type":"ContainerDied","Data":"c93d1cb1a341233feb2541eadda45752e9548fe7d8e07f83ec846855f4a92847"} Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.224717 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.352718 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-utilities\") pod \"c246c801-2892-43ac-b605-0d46f649e294\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.352928 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swqfk\" (UniqueName: \"kubernetes.io/projected/c246c801-2892-43ac-b605-0d46f649e294-kube-api-access-swqfk\") pod \"c246c801-2892-43ac-b605-0d46f649e294\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.352965 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-catalog-content\") pod \"c246c801-2892-43ac-b605-0d46f649e294\" (UID: \"c246c801-2892-43ac-b605-0d46f649e294\") " Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.353457 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-utilities" (OuterVolumeSpecName: "utilities") pod "c246c801-2892-43ac-b605-0d46f649e294" (UID: "c246c801-2892-43ac-b605-0d46f649e294"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.360093 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c246c801-2892-43ac-b605-0d46f649e294-kube-api-access-swqfk" (OuterVolumeSpecName: "kube-api-access-swqfk") pod "c246c801-2892-43ac-b605-0d46f649e294" (UID: "c246c801-2892-43ac-b605-0d46f649e294"). InnerVolumeSpecName "kube-api-access-swqfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.401477 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c246c801-2892-43ac-b605-0d46f649e294" (UID: "c246c801-2892-43ac-b605-0d46f649e294"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.454469 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swqfk\" (UniqueName: \"kubernetes.io/projected/c246c801-2892-43ac-b605-0d46f649e294-kube-api-access-swqfk\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.454535 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.454546 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c246c801-2892-43ac-b605-0d46f649e294-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.985916 4813 generic.go:334] "Generic (PLEG): container finished" podID="c246c801-2892-43ac-b605-0d46f649e294" containerID="8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713" exitCode=0 Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.985997 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mgc7" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.986002 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mgc7" event={"ID":"c246c801-2892-43ac-b605-0d46f649e294","Type":"ContainerDied","Data":"8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713"} Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.986165 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mgc7" event={"ID":"c246c801-2892-43ac-b605-0d46f649e294","Type":"ContainerDied","Data":"6dbda2af54926860b4b2a11ad6939146fd312fee49f9023e2c57190b9a32dff5"} Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.986185 4813 scope.go:117] "RemoveContainer" containerID="8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713" Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.989933 4813 generic.go:334] "Generic (PLEG): container finished" podID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerID="5e33bff1be48bb9880bbfaa4b5615d65240dd89df345d880a29bea347a6e0de6" exitCode=0 Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.990033 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" event={"ID":"ee83deb1-a281-4eb3-9ba2-7212ae713ae6","Type":"ContainerDied","Data":"5e33bff1be48bb9880bbfaa4b5615d65240dd89df345d880a29bea347a6e0de6"} Mar 20 15:52:19 crc kubenswrapper[4813]: I0320 15:52:19.993597 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerStarted","Data":"90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f"} Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.007225 4813 scope.go:117] "RemoveContainer" containerID="d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.049809 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mgc7"] Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.050024 4813 scope.go:117] "RemoveContainer" 
containerID="a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.056552 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mgc7"] Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.117412 4813 scope.go:117] "RemoveContainer" containerID="8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713" Mar 20 15:52:20 crc kubenswrapper[4813]: E0320 15:52:20.117804 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713\": container with ID starting with 8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713 not found: ID does not exist" containerID="8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.117847 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713"} err="failed to get container status \"8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713\": rpc error: code = NotFound desc = could not find container \"8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713\": container with ID starting with 8ba72092485f6ae07d6897808c38654061dc3b266fd9af464ee56f6735d54713 not found: ID does not exist" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.117872 4813 scope.go:117] "RemoveContainer" containerID="d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513" Mar 20 15:52:20 crc kubenswrapper[4813]: E0320 15:52:20.118153 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513\": container with ID starting with d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513 not found: ID does not exist" containerID="d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.118191 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513"} err="failed to get container status \"d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513\": rpc error: code = NotFound desc = could not find container \"d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513\": container with ID starting with d4dc2bd13fca32565ad2c559a88c90ae644a72f35fbbff9db711919c19720513 not found: ID does not exist" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.118209 4813 scope.go:117] "RemoveContainer" containerID="a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b" Mar 20 15:52:20 crc kubenswrapper[4813]: E0320 15:52:20.118565 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b\": container with ID starting with a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b not found: ID does not exist" containerID="a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b" Mar 20 15:52:20 crc kubenswrapper[4813]: I0320 15:52:20.118616 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b"} err="failed to get container status \"a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b\": rpc error: code = NotFound desc = could not find container \"a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b\": container with ID starting with a979af229badbb22412c98e81489ffaada1ec98cf2080b23b891b269ab87d80b not found: ID does not exist" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.004246 4813 generic.go:334] "Generic (PLEG): container finished" podID="dab66f52-4455-43e4-a3de-65d2611686d6" containerID="90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f" exitCode=0 Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.004339 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerDied","Data":"90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f"} Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.273413 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c246c801-2892-43ac-b605-0d46f649e294" path="/var/lib/kubelet/pods/c246c801-2892-43ac-b605-0d46f649e294/volumes" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.280292 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.377392 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-util\") pod \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.377578 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxxhd\" (UniqueName: \"kubernetes.io/projected/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-kube-api-access-qxxhd\") pod \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.377618 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-bundle\") pod \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\" (UID: \"ee83deb1-a281-4eb3-9ba2-7212ae713ae6\") " Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.382018 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-kube-api-access-qxxhd" (OuterVolumeSpecName: "kube-api-access-qxxhd") pod "ee83deb1-a281-4eb3-9ba2-7212ae713ae6" (UID: "ee83deb1-a281-4eb3-9ba2-7212ae713ae6"). InnerVolumeSpecName "kube-api-access-qxxhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.382937 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-bundle" (OuterVolumeSpecName: "bundle") pod "ee83deb1-a281-4eb3-9ba2-7212ae713ae6" (UID: "ee83deb1-a281-4eb3-9ba2-7212ae713ae6"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.389391 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-util" (OuterVolumeSpecName: "util") pod "ee83deb1-a281-4eb3-9ba2-7212ae713ae6" (UID: "ee83deb1-a281-4eb3-9ba2-7212ae713ae6"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.478923 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxxhd\" (UniqueName: \"kubernetes.io/projected/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-kube-api-access-qxxhd\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.478971 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:21 crc kubenswrapper[4813]: I0320 15:52:21.478985 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee83deb1-a281-4eb3-9ba2-7212ae713ae6-util\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.018679 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerStarted","Data":"beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202"} Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.027255 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" event={"ID":"ee83deb1-a281-4eb3-9ba2-7212ae713ae6","Type":"ContainerDied","Data":"3428b7901766dc7f55fdacd47b251d3ded30f8ea24920bf5556204e9c2e9fa69"} Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.027305 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3428b7901766dc7f55fdacd47b251d3ded30f8ea24920bf5556204e9c2e9fa69" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.027381 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.049375 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2krpb" podStartSLOduration=2.557627259 podStartE2EDuration="6.049356779s" podCreationTimestamp="2026-03-20 15:52:16 +0000 UTC" firstStartedPulling="2026-03-20 15:52:17.971221044 +0000 UTC m=+867.393923885" lastFinishedPulling="2026-03-20 15:52:21.462950564 +0000 UTC m=+870.885653405" observedRunningTime="2026-03-20 15:52:22.04569139 +0000 UTC m=+871.468394251" watchObservedRunningTime="2026-03-20 15:52:22.049356779 +0000 UTC m=+871.472059620" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.615376 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dvvsh"] Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.615959 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-controller" containerID="cri-o://aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.616041 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="northd" containerID="cri-o://bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.616065 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="sbdb" containerID="cri-o://dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.616132 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-node" containerID="cri-o://186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.616198 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="nbdb" containerID="cri-o://257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.616216 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-acl-logging" containerID="cri-o://3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.616115 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.664260 4813 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" containerID="cri-o://4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" gracePeriod=30 Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.916020 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/3.log" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.918348 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovn-acl-logging/0.log" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.918870 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovn-controller/0.log" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.919311 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973669 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9dxkv"] Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973869 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="pull" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973880 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="pull" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973890 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973896 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973903 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="extract-utilities" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973910 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="extract-utilities" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973916 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973921 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973930 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-node" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973939 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-node" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973952 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-acl-logging" 
Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973960 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-acl-logging" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973970 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="northd" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.973976 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="northd" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.973999 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="nbdb" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974007 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="nbdb" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974016 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974023 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974038 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-ovn-metrics" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974046 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-ovn-metrics" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974055 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="extract-content" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974061 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="extract-content" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974068 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="util" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974073 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="util" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974080 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="extract" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974085 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="extract" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974092 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="sbdb" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974097 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="sbdb" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974105 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 
15:52:22.974110 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974118 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kubecfg-setup" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974123 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kubecfg-setup" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974131 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="registry-server" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974136 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="registry-server" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974232 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="sbdb" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974240 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974247 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974257 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovn-acl-logging" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974265 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974271 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974277 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-ovn-metrics" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974286 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="kube-rbac-proxy-node" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974291 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="nbdb" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974297 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974304 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee83deb1-a281-4eb3-9ba2-7212ae713ae6" containerName="extract" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974310 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c246c801-2892-43ac-b605-0d46f649e294" containerName="registry-server" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974317 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" 
containerName="northd" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974393 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974400 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974510 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: E0320 15:52:22.974599 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.974606 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" containerName="ovnkube-controller" Mar 20 15:52:22 crc kubenswrapper[4813]: I0320 15:52:22.975973 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.034209 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovnkube-controller/3.log" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.036401 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovn-acl-logging/0.log" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.036976 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-dvvsh_32fae70f-6b1f-4935-9747-8080c9feb514/ovn-controller/0.log" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037425 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" exitCode=0 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037455 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" exitCode=0 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037465 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" exitCode=0 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037459 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037534 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037545 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037562 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037578 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037581 4813 scope.go:117] "RemoveContainer" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037493 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" exitCode=0 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037700 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" exitCode=0 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037720 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" exitCode=0 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037734 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" exitCode=143 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037744 4813 generic.go:334] "Generic (PLEG): container finished" podID="32fae70f-6b1f-4935-9747-8080c9feb514" containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" exitCode=143 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037817 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037848 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037860 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037869 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037875 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037881 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037886 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037891 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037896 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037901 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037906 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037914 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037922 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037929 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037935 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037940 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037945 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037950 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037955 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037960 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037965 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037970 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037978 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.037995 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038001 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038006 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038011 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038017 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038022 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038027 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038032 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038038 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038043 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038052 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dvvsh" event={"ID":"32fae70f-6b1f-4935-9747-8080c9feb514","Type":"ContainerDied","Data":"c9009a25724d4dc7837fa33d67bf1563fa6dae84a84425f2fc0711faf86f70cf"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038061 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038067 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038073 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038077 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038083 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038087 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038092 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038097 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038102 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.038106 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.039536 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/2.log" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.040096 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/1.log" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.040155 4813 generic.go:334] "Generic (PLEG): container finished" podID="a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1" containerID="3ad68338650caa0ff1469d0329f568e52ad46c8dccc1f1ff55ea58f5cf6d50eb" exitCode=2 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.040269 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerDied","Data":"3ad68338650caa0ff1469d0329f568e52ad46c8dccc1f1ff55ea58f5cf6d50eb"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.040299 4813 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d"} Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.040847 4813 scope.go:117] "RemoveContainer" containerID="3ad68338650caa0ff1469d0329f568e52ad46c8dccc1f1ff55ea58f5cf6d50eb" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.069887 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.100863 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-systemd-units\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.100910 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-script-lib\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101130 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-node-log\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101155 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-netd\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101156 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101193 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-config\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101206 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-node-log" (OuterVolumeSpecName: "node-log") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101224 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-ovn-kubernetes\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101228 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101242 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-log-socket\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101267 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-bin\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101286 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-kubelet\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101312 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-openvswitch\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101334 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-ovn\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101357 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32fae70f-6b1f-4935-9747-8080c9feb514-ovn-node-metrics-cert\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101380 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tms64\" (UniqueName: \"kubernetes.io/projected/32fae70f-6b1f-4935-9747-8080c9feb514-kube-api-access-tms64\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101397 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-netns\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101412 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-var-lib-cni-networks-ovn-kubernetes\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101441 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-etc-openvswitch\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101462 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-var-lib-openvswitch\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101510 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-systemd\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101527 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-env-overrides\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101540 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-slash\") pod \"32fae70f-6b1f-4935-9747-8080c9feb514\" (UID: \"32fae70f-6b1f-4935-9747-8080c9feb514\") " Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101589 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101655 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-ovnkube-script-lib\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101676 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-ovnkube-config\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101696 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-run-ovn-kubernetes\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101714 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-systemd\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101729 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-kubelet\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101746 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-slash\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101767 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-ovn\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101782 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-etc-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101797 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-cni-bin\") pod 
\"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101812 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzc6g\" (UniqueName: \"kubernetes.io/projected/95b45441-da8b-429c-ba9a-f4b621be3d37-kube-api-access-fzc6g\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101797 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101835 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/95b45441-da8b-429c-ba9a-f4b621be3d37-ovn-node-metrics-cert\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.101942 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-cni-netd\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102076 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-run-netns\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102086 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102104 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-var-lib-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102135 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102145 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-node-log\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102160 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-log-socket" (OuterVolumeSpecName: "log-socket") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102182 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102199 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102221 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-systemd-units\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102274 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-env-overrides\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102307 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-log-socket\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102423 4813 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-bin\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102445 4813 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-netns\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102462 4813 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-systemd-units\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102539 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102558 4813 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-node-log\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102574 4813 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-cni-netd\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102590 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-ovnkube-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102606 4813 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102621 4813 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-log-socket\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102224 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102241 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102256 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.102823 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-slash" (OuterVolumeSpecName: "host-slash") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.103187 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.103281 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.103323 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.103352 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.105145 4813 scope.go:117] "RemoveContainer" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.107838 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32fae70f-6b1f-4935-9747-8080c9feb514-kube-api-access-tms64" (OuterVolumeSpecName: "kube-api-access-tms64") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "kube-api-access-tms64". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.110072 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32fae70f-6b1f-4935-9747-8080c9feb514-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.118964 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "32fae70f-6b1f-4935-9747-8080c9feb514" (UID: "32fae70f-6b1f-4935-9747-8080c9feb514"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.132154 4813 scope.go:117] "RemoveContainer" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.147811 4813 scope.go:117] "RemoveContainer" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.167260 4813 scope.go:117] "RemoveContainer" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.184371 4813 scope.go:117] "RemoveContainer" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.197152 4813 scope.go:117] "RemoveContainer" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204098 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-run-ovn-kubernetes\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204144 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-systemd\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204171 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-kubelet\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204204 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-slash\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204231 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-ovn\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204249 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-etc-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204268 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-cni-bin\") pod \"ovnkube-node-9dxkv\" (UID: 
\"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204287 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzc6g\" (UniqueName: \"kubernetes.io/projected/95b45441-da8b-429c-ba9a-f4b621be3d37-kube-api-access-fzc6g\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204329 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/95b45441-da8b-429c-ba9a-f4b621be3d37-ovn-node-metrics-cert\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204351 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204380 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-cni-netd\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204402 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-run-netns\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204422 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-var-lib-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204443 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-node-log\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204464 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204547 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-systemd-units\") pod \"ovnkube-node-9dxkv\" (UID: 
\"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204579 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-env-overrides\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204600 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-log-socket\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204642 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-ovnkube-config\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204663 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-ovnkube-script-lib\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204725 4813 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-systemd\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204738 4813 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/32fae70f-6b1f-4935-9747-8080c9feb514-env-overrides\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204750 4813 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-slash\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204764 4813 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-kubelet\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204776 4813 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-openvswitch\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204787 4813 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-run-ovn\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204799 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/32fae70f-6b1f-4935-9747-8080c9feb514-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204810 4813 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tms64\" (UniqueName: \"kubernetes.io/projected/32fae70f-6b1f-4935-9747-8080c9feb514-kube-api-access-tms64\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204821 4813 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204833 4813 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.204844 4813 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/32fae70f-6b1f-4935-9747-8080c9feb514-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205309 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-run-ovn-kubernetes\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205362 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-systemd\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205390 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-kubelet\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205573 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-run-netns\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205660 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-systemd-units\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205707 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-var-lib-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-node-log\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205777 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205875 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-slash\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205911 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-ovn\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-etc-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.205969 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-cni-bin\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.206078 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-log-socket\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.206092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-host-cni-netd\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.206381 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-env-overrides\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.206640 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-ovnkube-config\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.206707 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/95b45441-da8b-429c-ba9a-f4b621be3d37-run-openvswitch\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.206872 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/95b45441-da8b-429c-ba9a-f4b621be3d37-ovnkube-script-lib\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.210063 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/95b45441-da8b-429c-ba9a-f4b621be3d37-ovn-node-metrics-cert\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.223328 4813 scope.go:117] "RemoveContainer" containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.226660 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzc6g\" (UniqueName: \"kubernetes.io/projected/95b45441-da8b-429c-ba9a-f4b621be3d37-kube-api-access-fzc6g\") pod \"ovnkube-node-9dxkv\" (UID: \"95b45441-da8b-429c-ba9a-f4b621be3d37\") " pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.243574 4813 scope.go:117] "RemoveContainer" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.265777 4813 scope.go:117] "RemoveContainer" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.266251 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": container with ID starting with 4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9 not found: ID does not exist" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.266298 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} err="failed to get container status \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": rpc error: code = NotFound desc = could not find container \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": container with ID starting with 4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.266331 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.266669 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": container with ID starting with 376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f not found: ID does not exist" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.266721 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} err="failed to get container status \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": rpc error: code = NotFound desc = could not find container \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": container with ID starting with 376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.266742 4813 scope.go:117] "RemoveContainer" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.266976 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": container with ID starting with dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d not found: ID does not exist" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267027 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} err="failed to get container status \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": rpc error: code = NotFound desc = could not find container \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": container with ID starting with dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267044 4813 scope.go:117] "RemoveContainer" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.267271 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": container with ID starting with 257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495 not found: ID does not exist" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267323 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} err="failed to get container status \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": rpc error: code = NotFound desc = could not find container \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": container with ID starting with 257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267341 4813 scope.go:117] "RemoveContainer" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" Mar 20 15:52:23 crc 
kubenswrapper[4813]: E0320 15:52:23.267602 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": container with ID starting with bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c not found: ID does not exist" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267627 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} err="failed to get container status \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": rpc error: code = NotFound desc = could not find container \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": container with ID starting with bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267645 4813 scope.go:117] "RemoveContainer" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.267914 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": container with ID starting with 4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0 not found: ID does not exist" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267942 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} err="failed to get container status \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": rpc error: code = NotFound desc = could not find container \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": container with ID starting with 4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.267959 4813 scope.go:117] "RemoveContainer" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.268167 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": container with ID starting with 186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5 not found: ID does not exist" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.268219 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} err="failed to get container status \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": rpc error: code = NotFound desc = could not find container \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": container with ID starting with 186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: 
I0320 15:52:23.268237 4813 scope.go:117] "RemoveContainer" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.268468 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": container with ID starting with 3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf not found: ID does not exist" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.268567 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} err="failed to get container status \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": rpc error: code = NotFound desc = could not find container \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": container with ID starting with 3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.268587 4813 scope.go:117] "RemoveContainer" containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.268874 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": container with ID starting with aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a not found: ID does not exist" containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.268916 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} err="failed to get container status \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": rpc error: code = NotFound desc = could not find container \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": container with ID starting with aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.268942 4813 scope.go:117] "RemoveContainer" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" Mar 20 15:52:23 crc kubenswrapper[4813]: E0320 15:52:23.269184 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": container with ID starting with eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c not found: ID does not exist" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.269238 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} err="failed to get container status \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": rpc error: code = NotFound desc = could not find container \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": container 
with ID starting with eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.269256 4813 scope.go:117] "RemoveContainer" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.269543 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} err="failed to get container status \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": rpc error: code = NotFound desc = could not find container \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": container with ID starting with 4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.269574 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.269837 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} err="failed to get container status \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": rpc error: code = NotFound desc = could not find container \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": container with ID starting with 376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.269898 4813 scope.go:117] "RemoveContainer" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270205 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} err="failed to get container status \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": rpc error: code = NotFound desc = could not find container \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": container with ID starting with dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270251 4813 scope.go:117] "RemoveContainer" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270525 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} err="failed to get container status \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": rpc error: code = NotFound desc = could not find container \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": container with ID starting with 257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270571 4813 scope.go:117] "RemoveContainer" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270775 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} err="failed to get container status \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": rpc error: code = NotFound desc = could not find container \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": container with ID starting with bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270797 4813 scope.go:117] "RemoveContainer" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.270997 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} err="failed to get container status \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": rpc error: code = NotFound desc = could not find container \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": container with ID starting with 4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271018 4813 scope.go:117] "RemoveContainer" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271204 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} err="failed to get container status \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": rpc error: code = NotFound desc = could not find container \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": container with ID starting with 186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271227 4813 scope.go:117] "RemoveContainer" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271430 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} err="failed to get container status \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": rpc error: code = NotFound desc = could not find container \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": container with ID starting with 3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271452 4813 scope.go:117] "RemoveContainer" containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271693 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} err="failed to get container status \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": rpc error: code = NotFound desc = could not find container \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": container with ID starting with aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a not found: ID does not exist" Mar 
20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271737 4813 scope.go:117] "RemoveContainer" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271944 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} err="failed to get container status \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": rpc error: code = NotFound desc = could not find container \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": container with ID starting with eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.271992 4813 scope.go:117] "RemoveContainer" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.272181 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} err="failed to get container status \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": rpc error: code = NotFound desc = could not find container \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": container with ID starting with 4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.272204 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.272379 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} err="failed to get container status \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": rpc error: code = NotFound desc = could not find container \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": container with ID starting with 376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.272426 4813 scope.go:117] "RemoveContainer" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.273564 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} err="failed to get container status \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": rpc error: code = NotFound desc = could not find container \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": container with ID starting with dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.273593 4813 scope.go:117] "RemoveContainer" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.273821 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} err="failed to get container status 
\"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": rpc error: code = NotFound desc = could not find container \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": container with ID starting with 257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.273900 4813 scope.go:117] "RemoveContainer" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.274413 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} err="failed to get container status \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": rpc error: code = NotFound desc = could not find container \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": container with ID starting with bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.274437 4813 scope.go:117] "RemoveContainer" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.274761 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} err="failed to get container status \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": rpc error: code = NotFound desc = could not find container \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": container with ID starting with 4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.274789 4813 scope.go:117] "RemoveContainer" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.275015 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} err="failed to get container status \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": rpc error: code = NotFound desc = could not find container \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": container with ID starting with 186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.275059 4813 scope.go:117] "RemoveContainer" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.275265 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} err="failed to get container status \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": rpc error: code = NotFound desc = could not find container \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": container with ID starting with 3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.275310 4813 scope.go:117] "RemoveContainer" 
containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.275821 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} err="failed to get container status \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": rpc error: code = NotFound desc = could not find container \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": container with ID starting with aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.275859 4813 scope.go:117] "RemoveContainer" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.276095 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} err="failed to get container status \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": rpc error: code = NotFound desc = could not find container \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": container with ID starting with eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.276119 4813 scope.go:117] "RemoveContainer" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.276430 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} err="failed to get container status \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": rpc error: code = NotFound desc = could not find container \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": container with ID starting with 4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.276471 4813 scope.go:117] "RemoveContainer" containerID="376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.276832 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f"} err="failed to get container status \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": rpc error: code = NotFound desc = could not find container \"376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f\": container with ID starting with 376034a850f155aaadbf72d2775b3314ffe58556f0cba64fc5aaa3fa0a1abd4f not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.276859 4813 scope.go:117] "RemoveContainer" containerID="dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.277110 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d"} err="failed to get container status \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": rpc error: code = NotFound desc = could not find 
container \"dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d\": container with ID starting with dfa955334dcb4a5a3b05c5b1286d3b587e8034b5a9a410df6250793a7fbb051d not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.277153 4813 scope.go:117] "RemoveContainer" containerID="257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.277345 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495"} err="failed to get container status \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": rpc error: code = NotFound desc = could not find container \"257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495\": container with ID starting with 257d817456f933f3933102f1fff7609a31b44bddb9614982b7f669d81b07f495 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.277391 4813 scope.go:117] "RemoveContainer" containerID="bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.277989 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c"} err="failed to get container status \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": rpc error: code = NotFound desc = could not find container \"bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c\": container with ID starting with bc119ecac019709a4edfa8c67bed7f3451d2f211782fb685dec563a0d6eeff1c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278012 4813 scope.go:117] "RemoveContainer" containerID="4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278231 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0"} err="failed to get container status \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": rpc error: code = NotFound desc = could not find container \"4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0\": container with ID starting with 4d7fcda728d13bb631d40cef65f9c1a55af0ca74510d28ecc9a4d0a1ee4816f0 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278275 4813 scope.go:117] "RemoveContainer" containerID="186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278622 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5"} err="failed to get container status \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": rpc error: code = NotFound desc = could not find container \"186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5\": container with ID starting with 186088a376bb76aabb22998282613f0a19958a2f52ce9933521e81a28a905de5 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278645 4813 scope.go:117] "RemoveContainer" containerID="3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278937 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf"} err="failed to get container status \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": rpc error: code = NotFound desc = could not find container \"3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf\": container with ID starting with 3359fcddfaf4168ae17e8f95cd89cd889dc7d4792d8461a4bb70e5999e79c0cf not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.278965 4813 scope.go:117] "RemoveContainer" containerID="aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.279208 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a"} err="failed to get container status \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": rpc error: code = NotFound desc = could not find container \"aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a\": container with ID starting with aafe992a54a83fe5d2f0b8dadf7beeb2c15c7534f764d3eed29a31faf688443a not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.279231 4813 scope.go:117] "RemoveContainer" containerID="eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.279451 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c"} err="failed to get container status \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": rpc error: code = NotFound desc = could not find container \"eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c\": container with ID starting with eef0f1491316147da977f2b25d7238dc2a236617efaa9a8c20bf8b60a861189c not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.279474 4813 scope.go:117] "RemoveContainer" containerID="4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.279718 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9"} err="failed to get container status \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": rpc error: code = NotFound desc = could not find container \"4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9\": container with ID starting with 4c06d42609e0414280458f0b6e6173bad899292bfffa697279e70af7d2d8a7a9 not found: ID does not exist" Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.290023 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:23 crc kubenswrapper[4813]: W0320 15:52:23.306575 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95b45441_da8b_429c_ba9a_f4b621be3d37.slice/crio-e03192704885d90494722aa52e65af0fb56dffa0405b755787f1e9a5fef63945 WatchSource:0}: Error finding container e03192704885d90494722aa52e65af0fb56dffa0405b755787f1e9a5fef63945: Status 404 returned error can't find the container with id e03192704885d90494722aa52e65af0fb56dffa0405b755787f1e9a5fef63945 Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.371155 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dvvsh"] Mar 20 15:52:23 crc kubenswrapper[4813]: I0320 15:52:23.377168 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dvvsh"] Mar 20 15:52:24 crc kubenswrapper[4813]: I0320 15:52:24.048307 4813 generic.go:334] "Generic (PLEG): container finished" podID="95b45441-da8b-429c-ba9a-f4b621be3d37" containerID="8cc73c74e5830999349f2c988eafc62ae2a3b952a6f189426740fcb130500c58" exitCode=0 Mar 20 15:52:24 crc kubenswrapper[4813]: I0320 15:52:24.048416 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerDied","Data":"8cc73c74e5830999349f2c988eafc62ae2a3b952a6f189426740fcb130500c58"} Mar 20 15:52:24 crc kubenswrapper[4813]: I0320 15:52:24.048862 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"e03192704885d90494722aa52e65af0fb56dffa0405b755787f1e9a5fef63945"} Mar 20 15:52:24 crc kubenswrapper[4813]: I0320 15:52:24.054084 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/2.log" Mar 20 15:52:24 crc kubenswrapper[4813]: I0320 15:52:24.054921 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/1.log" Mar 20 15:52:24 crc kubenswrapper[4813]: I0320 15:52:24.055002 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gpmgw" event={"ID":"a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1","Type":"ContainerStarted","Data":"fc81049129625e880e32fbdd669f4f052912c061f3177c35984f636bb482a1b2"} Mar 20 15:52:25 crc kubenswrapper[4813]: I0320 15:52:25.063995 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"4a4036df6c2c8e505690937320cdeeb311f67a9278bd1d47d21022207edc4abe"} Mar 20 15:52:25 crc kubenswrapper[4813]: I0320 15:52:25.064296 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"61c59133b9ffd2c93ffdc845a81e5fa7913064117acedd651f9ebab8580ee1d5"} Mar 20 15:52:25 crc kubenswrapper[4813]: I0320 15:52:25.064307 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"93e901ddd4c2a0ecd85188e4c02c825a4aba337ed42272390a33021eb2138bf9"} Mar 20 15:52:25 
crc kubenswrapper[4813]: I0320 15:52:25.064315 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"c6848bf50e599abc9775b78ea9a3b182af33e84c90b6f46eb129135127a53644"} Mar 20 15:52:25 crc kubenswrapper[4813]: I0320 15:52:25.064324 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"c57284817d7af479f816e986ebb588a6933a05f866e0c53eb91dd24cb3d0fcf1"} Mar 20 15:52:25 crc kubenswrapper[4813]: I0320 15:52:25.064331 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"a972665c2836a23477c309b462d9180633a7e04e522b0109473417001a133358"} Mar 20 15:52:25 crc kubenswrapper[4813]: I0320 15:52:25.270937 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32fae70f-6b1f-4935-9747-8080c9feb514" path="/var/lib/kubelet/pods/32fae70f-6b1f-4935-9747-8080c9feb514/volumes" Mar 20 15:52:27 crc kubenswrapper[4813]: I0320 15:52:27.209121 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:27 crc kubenswrapper[4813]: I0320 15:52:27.209173 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:28 crc kubenswrapper[4813]: I0320 15:52:28.266121 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2krpb" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="registry-server" probeResult="failure" output=< Mar 20 15:52:28 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Mar 20 15:52:28 crc kubenswrapper[4813]: > Mar 20 15:52:29 crc kubenswrapper[4813]: I0320 15:52:29.091008 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"25ac76e9a9038ec78cb282338a5891502477d67b034a51cd7320a3f9401a2c7e"} Mar 20 15:52:30 crc kubenswrapper[4813]: I0320 15:52:30.951077 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw"] Mar 20 15:52:30 crc kubenswrapper[4813]: I0320 15:52:30.951972 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:30 crc kubenswrapper[4813]: I0320 15:52:30.954505 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Mar 20 15:52:30 crc kubenswrapper[4813]: I0320 15:52:30.954641 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Mar 20 15:52:30 crc kubenswrapper[4813]: I0320 15:52:30.955002 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-pkrnt" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.106578 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" event={"ID":"95b45441-da8b-429c-ba9a-f4b621be3d37","Type":"ContainerStarted","Data":"da463c373250379661f45fbfa9cc940052548f33286a993dcb4c51750f0cbe5a"} Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.106920 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.106940 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.107874 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm2b8\" (UniqueName: \"kubernetes.io/projected/13a0d7fe-7887-4ea4-ae5b-b47d8689373b-kube-api-access-lm2b8\") pod \"obo-prometheus-operator-8ff7d675-zbjnw\" (UID: \"13a0d7fe-7887-4ea4-ae5b-b47d8689373b\") " pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.133977 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.141698 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" podStartSLOduration=9.141674198 podStartE2EDuration="9.141674198s" podCreationTimestamp="2026-03-20 15:52:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:52:31.137090934 +0000 UTC m=+880.559793775" watchObservedRunningTime="2026-03-20 15:52:31.141674198 +0000 UTC m=+880.564377039" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.209000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm2b8\" (UniqueName: \"kubernetes.io/projected/13a0d7fe-7887-4ea4-ae5b-b47d8689373b-kube-api-access-lm2b8\") pod \"obo-prometheus-operator-8ff7d675-zbjnw\" (UID: \"13a0d7fe-7887-4ea4-ae5b-b47d8689373b\") " pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.223054 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb"] Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.223675 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.225957 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.226155 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-nnl5t" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.248680 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn"] Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.249306 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.250113 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm2b8\" (UniqueName: \"kubernetes.io/projected/13a0d7fe-7887-4ea4-ae5b-b47d8689373b-kube-api-access-lm2b8\") pod \"obo-prometheus-operator-8ff7d675-zbjnw\" (UID: \"13a0d7fe-7887-4ea4-ae5b-b47d8689373b\") " pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.266712 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.291241 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(cca2549696cbcbb072a0e66a9c869214d75c742ca12a3417b60c76eb507ca52a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.291300 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(cca2549696cbcbb072a0e66a9c869214d75c742ca12a3417b60c76eb507ca52a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.291320 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(cca2549696cbcbb072a0e66a9c869214d75c742ca12a3417b60c76eb507ca52a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.291360 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators(13a0d7fe-7887-4ea4-ae5b-b47d8689373b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators(13a0d7fe-7887-4ea4-ae5b-b47d8689373b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(cca2549696cbcbb072a0e66a9c869214d75c742ca12a3417b60c76eb507ca52a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" podUID="13a0d7fe-7887-4ea4-ae5b-b47d8689373b" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.410297 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ac554a5-8e12-415d-845c-72c909a7d1d2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb\" (UID: \"4ac554a5-8e12-415d-845c-72c909a7d1d2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.410423 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fb3c3550-7cd9-4286-8ab3-e554c159b357-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn\" (UID: \"fb3c3550-7cd9-4286-8ab3-e554c159b357\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.410542 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fb3c3550-7cd9-4286-8ab3-e554c159b357-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn\" (UID: \"fb3c3550-7cd9-4286-8ab3-e554c159b357\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.411415 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ac554a5-8e12-415d-845c-72c909a7d1d2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb\" (UID: \"4ac554a5-8e12-415d-845c-72c909a7d1d2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.512227 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ac554a5-8e12-415d-845c-72c909a7d1d2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb\" (UID: \"4ac554a5-8e12-415d-845c-72c909a7d1d2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.512305 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fb3c3550-7cd9-4286-8ab3-e554c159b357-webhook-cert\") pod 
\"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn\" (UID: \"fb3c3550-7cd9-4286-8ab3-e554c159b357\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.512347 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fb3c3550-7cd9-4286-8ab3-e554c159b357-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn\" (UID: \"fb3c3550-7cd9-4286-8ab3-e554c159b357\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.512385 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ac554a5-8e12-415d-845c-72c909a7d1d2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb\" (UID: \"4ac554a5-8e12-415d-845c-72c909a7d1d2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.520102 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fb3c3550-7cd9-4286-8ab3-e554c159b357-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn\" (UID: \"fb3c3550-7cd9-4286-8ab3-e554c159b357\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.520970 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ac554a5-8e12-415d-845c-72c909a7d1d2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb\" (UID: \"4ac554a5-8e12-415d-845c-72c909a7d1d2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.520989 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fb3c3550-7cd9-4286-8ab3-e554c159b357-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn\" (UID: \"fb3c3550-7cd9-4286-8ab3-e554c159b357\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.521106 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ac554a5-8e12-415d-845c-72c909a7d1d2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb\" (UID: \"4ac554a5-8e12-415d-845c-72c909a7d1d2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.569168 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.582606 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.597211 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-6dd7dd855f-bvk5s"] Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.598036 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.600577 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(055b406c27dcdd76cf2a80ec56b313c745cb8c3c26902ea62450663bde721378): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.600638 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(055b406c27dcdd76cf2a80ec56b313c745cb8c3c26902ea62450663bde721378): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.600665 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(055b406c27dcdd76cf2a80ec56b313c745cb8c3c26902ea62450663bde721378): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.600714 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators(4ac554a5-8e12-415d-845c-72c909a7d1d2)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators(4ac554a5-8e12-415d-845c-72c909a7d1d2)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(055b406c27dcdd76cf2a80ec56b313c745cb8c3c26902ea62450663bde721378): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" podUID="4ac554a5-8e12-415d-845c-72c909a7d1d2" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.600800 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-phnhc" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.601059 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.620931 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(df5325c4cf5dbaf8daf990b45412844a607c35b379805909e5b04291b7ee4eb3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.621255 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(df5325c4cf5dbaf8daf990b45412844a607c35b379805909e5b04291b7ee4eb3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.621277 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(df5325c4cf5dbaf8daf990b45412844a607c35b379805909e5b04291b7ee4eb3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.621326 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators(fb3c3550-7cd9-4286-8ab3-e554c159b357)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators(fb3c3550-7cd9-4286-8ab3-e554c159b357)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(df5325c4cf5dbaf8daf990b45412844a607c35b379805909e5b04291b7ee4eb3): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" podUID="fb3c3550-7cd9-4286-8ab3-e554c159b357" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.714731 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/9cbfb8c9-4eb6-4382-b8b1-572029001cc0-observability-operator-tls\") pod \"observability-operator-6dd7dd855f-bvk5s\" (UID: \"9cbfb8c9-4eb6-4382-b8b1-572029001cc0\") " pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.714797 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcrjq\" (UniqueName: \"kubernetes.io/projected/9cbfb8c9-4eb6-4382-b8b1-572029001cc0-kube-api-access-rcrjq\") pod \"observability-operator-6dd7dd855f-bvk5s\" (UID: \"9cbfb8c9-4eb6-4382-b8b1-572029001cc0\") " pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.816048 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/9cbfb8c9-4eb6-4382-b8b1-572029001cc0-observability-operator-tls\") pod \"observability-operator-6dd7dd855f-bvk5s\" (UID: \"9cbfb8c9-4eb6-4382-b8b1-572029001cc0\") " pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.816110 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcrjq\" (UniqueName: \"kubernetes.io/projected/9cbfb8c9-4eb6-4382-b8b1-572029001cc0-kube-api-access-rcrjq\") pod \"observability-operator-6dd7dd855f-bvk5s\" (UID: \"9cbfb8c9-4eb6-4382-b8b1-572029001cc0\") " pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.820450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/9cbfb8c9-4eb6-4382-b8b1-572029001cc0-observability-operator-tls\") pod \"observability-operator-6dd7dd855f-bvk5s\" (UID: \"9cbfb8c9-4eb6-4382-b8b1-572029001cc0\") " pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.833723 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcrjq\" (UniqueName: \"kubernetes.io/projected/9cbfb8c9-4eb6-4382-b8b1-572029001cc0-kube-api-access-rcrjq\") pod \"observability-operator-6dd7dd855f-bvk5s\" (UID: \"9cbfb8c9-4eb6-4382-b8b1-572029001cc0\") " pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.940102 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb"] Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.955740 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-6dd7dd855f-bvk5s"] Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.958207 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.974253 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw"] Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.975847 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(fd2e2b1276c8d653b5a7601947ad494eeb644bc339b800c084404288ea0fe3af): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.975923 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(fd2e2b1276c8d653b5a7601947ad494eeb644bc339b800c084404288ea0fe3af): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.975951 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(fd2e2b1276c8d653b5a7601947ad494eeb644bc339b800c084404288ea0fe3af): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:31 crc kubenswrapper[4813]: E0320 15:52:31.976000 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-6dd7dd855f-bvk5s_openshift-operators(9cbfb8c9-4eb6-4382-b8b1-572029001cc0)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-6dd7dd855f-bvk5s_openshift-operators(9cbfb8c9-4eb6-4382-b8b1-572029001cc0)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(fd2e2b1276c8d653b5a7601947ad494eeb644bc339b800c084404288ea0fe3af): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" podUID="9cbfb8c9-4eb6-4382-b8b1-572029001cc0" Mar 20 15:52:31 crc kubenswrapper[4813]: I0320 15:52:31.984405 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn"] Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.032990 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-b9bc87685-dshn8"] Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.033824 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.037369 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-6vs5v" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.037638 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-service-cert" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.065451 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-b9bc87685-dshn8"] Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.110922 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.111615 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.111962 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.112169 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.112329 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.112568 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.112638 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.113464 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.113599 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.121187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/95615498-4862-4d52-9c91-6f2f4f17d4ba-openshift-service-ca\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.121236 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trwrs\" (UniqueName: \"kubernetes.io/projected/95615498-4862-4d52-9c91-6f2f4f17d4ba-kube-api-access-trwrs\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.121422 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95615498-4862-4d52-9c91-6f2f4f17d4ba-apiservice-cert\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.121470 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95615498-4862-4d52-9c91-6f2f4f17d4ba-webhook-cert\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.175627 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(4f2f332563b8a955d1a410f5374f7f875f1305ee8edc2489809dade8cd01baab): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.175690 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(4f2f332563b8a955d1a410f5374f7f875f1305ee8edc2489809dade8cd01baab): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.175713 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(4f2f332563b8a955d1a410f5374f7f875f1305ee8edc2489809dade8cd01baab): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.175769 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators(4ac554a5-8e12-415d-845c-72c909a7d1d2)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators(4ac554a5-8e12-415d-845c-72c909a7d1d2)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_openshift-operators_4ac554a5-8e12-415d-845c-72c909a7d1d2_0(4f2f332563b8a955d1a410f5374f7f875f1305ee8edc2489809dade8cd01baab): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" podUID="4ac554a5-8e12-415d-845c-72c909a7d1d2" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.185134 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.192793 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(2d31655f6b0947b760a29a0d2ff8d9b73276cd5cbd185a26879015c91c40174d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.192851 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(2d31655f6b0947b760a29a0d2ff8d9b73276cd5cbd185a26879015c91c40174d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.192873 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(2d31655f6b0947b760a29a0d2ff8d9b73276cd5cbd185a26879015c91c40174d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.192914 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-6dd7dd855f-bvk5s_openshift-operators(9cbfb8c9-4eb6-4382-b8b1-572029001cc0)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-6dd7dd855f-bvk5s_openshift-operators(9cbfb8c9-4eb6-4382-b8b1-572029001cc0)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-6dd7dd855f-bvk5s_openshift-operators_9cbfb8c9-4eb6-4382-b8b1-572029001cc0_0(2d31655f6b0947b760a29a0d2ff8d9b73276cd5cbd185a26879015c91c40174d): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" podUID="9cbfb8c9-4eb6-4382-b8b1-572029001cc0" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.198829 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(18791ac61babed78fa00a29a1205684a3d386f6994e2e7dfa676b19d3a967def): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.198904 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(18791ac61babed78fa00a29a1205684a3d386f6994e2e7dfa676b19d3a967def): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.198933 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(18791ac61babed78fa00a29a1205684a3d386f6994e2e7dfa676b19d3a967def): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.198981 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators(fb3c3550-7cd9-4286-8ab3-e554c159b357)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators(fb3c3550-7cd9-4286-8ab3-e554c159b357)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_openshift-operators_fb3c3550-7cd9-4286-8ab3-e554c159b357_0(18791ac61babed78fa00a29a1205684a3d386f6994e2e7dfa676b19d3a967def): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" podUID="fb3c3550-7cd9-4286-8ab3-e554c159b357" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.215920 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(e4fba9418f04a84d3ff5ce7b07d5e243705523228ed2d3c2d622682a019ccc54): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.215992 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(e4fba9418f04a84d3ff5ce7b07d5e243705523228ed2d3c2d622682a019ccc54): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.216011 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(e4fba9418f04a84d3ff5ce7b07d5e243705523228ed2d3c2d622682a019ccc54): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.216053 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators(13a0d7fe-7887-4ea4-ae5b-b47d8689373b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators(13a0d7fe-7887-4ea4-ae5b-b47d8689373b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-8ff7d675-zbjnw_openshift-operators_13a0d7fe-7887-4ea4-ae5b-b47d8689373b_0(e4fba9418f04a84d3ff5ce7b07d5e243705523228ed2d3c2d622682a019ccc54): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" podUID="13a0d7fe-7887-4ea4-ae5b-b47d8689373b" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.223946 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95615498-4862-4d52-9c91-6f2f4f17d4ba-apiservice-cert\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.223991 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95615498-4862-4d52-9c91-6f2f4f17d4ba-webhook-cert\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.224121 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/95615498-4862-4d52-9c91-6f2f4f17d4ba-openshift-service-ca\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.224143 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trwrs\" (UniqueName: \"kubernetes.io/projected/95615498-4862-4d52-9c91-6f2f4f17d4ba-kube-api-access-trwrs\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.225246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/95615498-4862-4d52-9c91-6f2f4f17d4ba-openshift-service-ca\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.229415 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95615498-4862-4d52-9c91-6f2f4f17d4ba-apiservice-cert\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.230024 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95615498-4862-4d52-9c91-6f2f4f17d4ba-webhook-cert\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.242420 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trwrs\" (UniqueName: \"kubernetes.io/projected/95615498-4862-4d52-9c91-6f2f4f17d4ba-kube-api-access-trwrs\") pod \"perses-operator-b9bc87685-dshn8\" (UID: \"95615498-4862-4d52-9c91-6f2f4f17d4ba\") " pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: I0320 15:52:32.353589 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.373732 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(759cbfca508318163c3a1773f8887434455b715e3893b626a7bd247ab6db8765): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.373800 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(759cbfca508318163c3a1773f8887434455b715e3893b626a7bd247ab6db8765): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.373820 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(759cbfca508318163c3a1773f8887434455b715e3893b626a7bd247ab6db8765): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:32 crc kubenswrapper[4813]: E0320 15:52:32.373865 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-b9bc87685-dshn8_openshift-operators(95615498-4862-4d52-9c91-6f2f4f17d4ba)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-b9bc87685-dshn8_openshift-operators(95615498-4862-4d52-9c91-6f2f4f17d4ba)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(759cbfca508318163c3a1773f8887434455b715e3893b626a7bd247ab6db8765): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-b9bc87685-dshn8" podUID="95615498-4862-4d52-9c91-6f2f4f17d4ba" Mar 20 15:52:33 crc kubenswrapper[4813]: I0320 15:52:33.115549 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:33 crc kubenswrapper[4813]: I0320 15:52:33.116541 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:33 crc kubenswrapper[4813]: E0320 15:52:33.159771 4813 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(4d68a1b89ebeac02ca62006e257b8f4e92d654b456a62b7678cfa5819166d511): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 15:52:33 crc kubenswrapper[4813]: E0320 15:52:33.159830 4813 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(4d68a1b89ebeac02ca62006e257b8f4e92d654b456a62b7678cfa5819166d511): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:33 crc kubenswrapper[4813]: E0320 15:52:33.159852 4813 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(4d68a1b89ebeac02ca62006e257b8f4e92d654b456a62b7678cfa5819166d511): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:33 crc kubenswrapper[4813]: E0320 15:52:33.159894 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-b9bc87685-dshn8_openshift-operators(95615498-4862-4d52-9c91-6f2f4f17d4ba)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-b9bc87685-dshn8_openshift-operators(95615498-4862-4d52-9c91-6f2f4f17d4ba)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-b9bc87685-dshn8_openshift-operators_95615498-4862-4d52-9c91-6f2f4f17d4ba_0(4d68a1b89ebeac02ca62006e257b8f4e92d654b456a62b7678cfa5819166d511): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-b9bc87685-dshn8" podUID="95615498-4862-4d52-9c91-6f2f4f17d4ba" Mar 20 15:52:33 crc kubenswrapper[4813]: I0320 15:52:33.842576 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:52:33 crc kubenswrapper[4813]: I0320 15:52:33.842644 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:52:37 crc kubenswrapper[4813]: I0320 15:52:37.249623 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:37 crc kubenswrapper[4813]: I0320 15:52:37.288305 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.059888 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2krpb"] Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.060396 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2krpb" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="registry-server" containerID="cri-o://beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202" gracePeriod=2 Mar 20 
15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.383657 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.528991 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvn9c\" (UniqueName: \"kubernetes.io/projected/dab66f52-4455-43e4-a3de-65d2611686d6-kube-api-access-vvn9c\") pod \"dab66f52-4455-43e4-a3de-65d2611686d6\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.529047 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-utilities\") pod \"dab66f52-4455-43e4-a3de-65d2611686d6\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.529086 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-catalog-content\") pod \"dab66f52-4455-43e4-a3de-65d2611686d6\" (UID: \"dab66f52-4455-43e4-a3de-65d2611686d6\") " Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.530172 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-utilities" (OuterVolumeSpecName: "utilities") pod "dab66f52-4455-43e4-a3de-65d2611686d6" (UID: "dab66f52-4455-43e4-a3de-65d2611686d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.535960 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dab66f52-4455-43e4-a3de-65d2611686d6-kube-api-access-vvn9c" (OuterVolumeSpecName: "kube-api-access-vvn9c") pod "dab66f52-4455-43e4-a3de-65d2611686d6" (UID: "dab66f52-4455-43e4-a3de-65d2611686d6"). InnerVolumeSpecName "kube-api-access-vvn9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.630725 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.630762 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvn9c\" (UniqueName: \"kubernetes.io/projected/dab66f52-4455-43e4-a3de-65d2611686d6-kube-api-access-vvn9c\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.666590 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dab66f52-4455-43e4-a3de-65d2611686d6" (UID: "dab66f52-4455-43e4-a3de-65d2611686d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:52:40 crc kubenswrapper[4813]: I0320 15:52:40.731849 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dab66f52-4455-43e4-a3de-65d2611686d6-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.159233 4813 generic.go:334] "Generic (PLEG): container finished" podID="dab66f52-4455-43e4-a3de-65d2611686d6" containerID="beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202" exitCode=0 Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.159287 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerDied","Data":"beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202"} Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.159313 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2krpb" event={"ID":"dab66f52-4455-43e4-a3de-65d2611686d6","Type":"ContainerDied","Data":"820cf214a4e45ef987f941e7ed74fa81e8b6515ef459bfccfc31324ba93a7ac9"} Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.159340 4813 scope.go:117] "RemoveContainer" containerID="beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.159352 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2krpb" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.173806 4813 scope.go:117] "RemoveContainer" containerID="90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.203295 4813 scope.go:117] "RemoveContainer" containerID="78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.217977 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2krpb"] Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.223687 4813 scope.go:117] "RemoveContainer" containerID="beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.227558 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2krpb"] Mar 20 15:52:41 crc kubenswrapper[4813]: E0320 15:52:41.231949 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202\": container with ID starting with beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202 not found: ID does not exist" containerID="beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.231997 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202"} err="failed to get container status \"beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202\": rpc error: code = NotFound desc = could not find container \"beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202\": container with ID starting with beb463383838a222a15a467537c9980b6deffe69731eff1704132630ce5e4202 not found: ID does not exist" Mar 20 15:52:41 crc 
kubenswrapper[4813]: I0320 15:52:41.232026 4813 scope.go:117] "RemoveContainer" containerID="90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f" Mar 20 15:52:41 crc kubenswrapper[4813]: E0320 15:52:41.232457 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f\": container with ID starting with 90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f not found: ID does not exist" containerID="90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.232491 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f"} err="failed to get container status \"90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f\": rpc error: code = NotFound desc = could not find container \"90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f\": container with ID starting with 90beaa9abb1b5e19e182450b12b46146fba010b257a17d3bd1ff5916eee49d6f not found: ID does not exist" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.232504 4813 scope.go:117] "RemoveContainer" containerID="78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb" Mar 20 15:52:41 crc kubenswrapper[4813]: E0320 15:52:41.232796 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb\": container with ID starting with 78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb not found: ID does not exist" containerID="78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.232848 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb"} err="failed to get container status \"78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb\": rpc error: code = NotFound desc = could not find container \"78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb\": container with ID starting with 78e4d136e0728f46c8d9f09b57e0e6db43a5bc8039bf973bc28f61576d4c02eb not found: ID does not exist" Mar 20 15:52:41 crc kubenswrapper[4813]: I0320 15:52:41.276344 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" path="/var/lib/kubelet/pods/dab66f52-4455-43e4-a3de-65d2611686d6/volumes" Mar 20 15:52:43 crc kubenswrapper[4813]: I0320 15:52:43.264971 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:43 crc kubenswrapper[4813]: I0320 15:52:43.264983 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:43 crc kubenswrapper[4813]: I0320 15:52:43.265704 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" Mar 20 15:52:43 crc kubenswrapper[4813]: I0320 15:52:43.265880 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:43 crc kubenswrapper[4813]: I0320 15:52:43.519632 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb"] Mar 20 15:52:43 crc kubenswrapper[4813]: W0320 15:52:43.528730 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ac554a5_8e12_415d_845c_72c909a7d1d2.slice/crio-2725e9686aaadcae3b2f124777e32b85db0bc42a64b71b35484521cd0d51174d WatchSource:0}: Error finding container 2725e9686aaadcae3b2f124777e32b85db0bc42a64b71b35484521cd0d51174d: Status 404 returned error can't find the container with id 2725e9686aaadcae3b2f124777e32b85db0bc42a64b71b35484521cd0d51174d Mar 20 15:52:43 crc kubenswrapper[4813]: I0320 15:52:43.763391 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-6dd7dd855f-bvk5s"] Mar 20 15:52:43 crc kubenswrapper[4813]: W0320 15:52:43.773264 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cbfb8c9_4eb6_4382_b8b1_572029001cc0.slice/crio-61e4c0c5d1f8bfd3c71edd55f48850391544665e18baaead49899259b68e2d4b WatchSource:0}: Error finding container 61e4c0c5d1f8bfd3c71edd55f48850391544665e18baaead49899259b68e2d4b: Status 404 returned error can't find the container with id 61e4c0c5d1f8bfd3c71edd55f48850391544665e18baaead49899259b68e2d4b Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.176862 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" event={"ID":"9cbfb8c9-4eb6-4382-b8b1-572029001cc0","Type":"ContainerStarted","Data":"61e4c0c5d1f8bfd3c71edd55f48850391544665e18baaead49899259b68e2d4b"} Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.177793 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" event={"ID":"4ac554a5-8e12-415d-845c-72c909a7d1d2","Type":"ContainerStarted","Data":"2725e9686aaadcae3b2f124777e32b85db0bc42a64b71b35484521cd0d51174d"} Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.265579 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.265594 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.266020 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.266108 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.729924 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw"] Mar 20 15:52:44 crc kubenswrapper[4813]: W0320 15:52:44.732043 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13a0d7fe_7887_4ea4_ae5b_b47d8689373b.slice/crio-f3bdb4b08e01cf251e56649c721598e5b78f7264150c20c5a166361c7bee4137 WatchSource:0}: Error finding container f3bdb4b08e01cf251e56649c721598e5b78f7264150c20c5a166361c7bee4137: Status 404 returned error can't find the container with id f3bdb4b08e01cf251e56649c721598e5b78f7264150c20c5a166361c7bee4137 Mar 20 15:52:44 crc kubenswrapper[4813]: I0320 15:52:44.779474 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn"] Mar 20 15:52:45 crc kubenswrapper[4813]: I0320 15:52:45.185344 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" event={"ID":"fb3c3550-7cd9-4286-8ab3-e554c159b357","Type":"ContainerStarted","Data":"a63eea080e455343182d97eadb31a989d36fa9ac858a6687c3ec62395f14cfa1"} Mar 20 15:52:45 crc kubenswrapper[4813]: I0320 15:52:45.186676 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" event={"ID":"13a0d7fe-7887-4ea4-ae5b-b47d8689373b","Type":"ContainerStarted","Data":"f3bdb4b08e01cf251e56649c721598e5b78f7264150c20c5a166361c7bee4137"} Mar 20 15:52:47 crc kubenswrapper[4813]: I0320 15:52:47.266372 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:47 crc kubenswrapper[4813]: I0320 15:52:47.267049 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:53 crc kubenswrapper[4813]: I0320 15:52:53.312571 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9dxkv" Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.592595 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-b9bc87685-dshn8"] Mar 20 15:52:55 crc kubenswrapper[4813]: W0320 15:52:55.599614 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95615498_4862_4d52_9c91_6f2f4f17d4ba.slice/crio-68900cb1811836d16c68f41212a3deb2fa5c2a32189822f637b1dd7657f6ae3e WatchSource:0}: Error finding container 68900cb1811836d16c68f41212a3deb2fa5c2a32189822f637b1dd7657f6ae3e: Status 404 returned error can't find the container with id 68900cb1811836d16c68f41212a3deb2fa5c2a32189822f637b1dd7657f6ae3e Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.975114 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" event={"ID":"9cbfb8c9-4eb6-4382-b8b1-572029001cc0","Type":"ContainerStarted","Data":"36498835a74a9aac2d73f205938786c5a3da60529b70aeccc59a28d71f3f87e8"} Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.975370 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.978949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" event={"ID":"4ac554a5-8e12-415d-845c-72c909a7d1d2","Type":"ContainerStarted","Data":"3c9b70f5b98277bd912333c9554af075cc54d03b0db5da2a8badc8a26bf606cc"} Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.981340 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" event={"ID":"fb3c3550-7cd9-4286-8ab3-e554c159b357","Type":"ContainerStarted","Data":"82201e0ac49528b7d45e23f6cc8173d04532835cf7b1a41a242e3802277e96b0"} Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.983431 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-b9bc87685-dshn8" event={"ID":"95615498-4862-4d52-9c91-6f2f4f17d4ba","Type":"ContainerStarted","Data":"68900cb1811836d16c68f41212a3deb2fa5c2a32189822f637b1dd7657f6ae3e"} Mar 20 15:52:55 crc kubenswrapper[4813]: I0320 15:52:55.990774 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" event={"ID":"13a0d7fe-7887-4ea4-ae5b-b47d8689373b","Type":"ContainerStarted","Data":"0c9d56e1672052731bba1e372cd6a168d2b07ac5580e9823ec2db140fda1ff3c"} Mar 20 15:52:56 crc kubenswrapper[4813]: I0320 15:52:56.001672 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" podStartSLOduration=13.744857535 podStartE2EDuration="25.001653262s" podCreationTimestamp="2026-03-20 15:52:31 +0000 UTC" firstStartedPulling="2026-03-20 15:52:43.787212554 +0000 UTC m=+893.209915395" lastFinishedPulling="2026-03-20 15:52:55.044008281 +0000 UTC m=+904.466711122" observedRunningTime="2026-03-20 15:52:55.996937245 +0000 UTC m=+905.419640086" watchObservedRunningTime="2026-03-20 15:52:56.001653262 +0000 UTC m=+905.424356113" Mar 20 15:52:56 
crc kubenswrapper[4813]: I0320 15:52:56.025523 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn" podStartSLOduration=14.531992781 podStartE2EDuration="25.025508705s" podCreationTimestamp="2026-03-20 15:52:31 +0000 UTC" firstStartedPulling="2026-03-20 15:52:44.78623024 +0000 UTC m=+894.208933111" lastFinishedPulling="2026-03-20 15:52:55.279746194 +0000 UTC m=+904.702449035" observedRunningTime="2026-03-20 15:52:56.022219506 +0000 UTC m=+905.444922367" watchObservedRunningTime="2026-03-20 15:52:56.025508705 +0000 UTC m=+905.448211546" Mar 20 15:52:56 crc kubenswrapper[4813]: I0320 15:52:56.041536 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-6dd7dd855f-bvk5s" Mar 20 15:52:56 crc kubenswrapper[4813]: I0320 15:52:56.046924 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb" podStartSLOduration=13.250485889 podStartE2EDuration="25.046909691s" podCreationTimestamp="2026-03-20 15:52:31 +0000 UTC" firstStartedPulling="2026-03-20 15:52:43.530160605 +0000 UTC m=+892.952863446" lastFinishedPulling="2026-03-20 15:52:55.326584407 +0000 UTC m=+904.749287248" observedRunningTime="2026-03-20 15:52:56.044832975 +0000 UTC m=+905.467535816" watchObservedRunningTime="2026-03-20 15:52:56.046909691 +0000 UTC m=+905.469612532" Mar 20 15:52:56 crc kubenswrapper[4813]: I0320 15:52:56.078217 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-8ff7d675-zbjnw" podStartSLOduration=15.76908266 podStartE2EDuration="26.078196745s" podCreationTimestamp="2026-03-20 15:52:30 +0000 UTC" firstStartedPulling="2026-03-20 15:52:44.734855515 +0000 UTC m=+894.157558386" lastFinishedPulling="2026-03-20 15:52:55.04396962 +0000 UTC m=+904.466672471" observedRunningTime="2026-03-20 15:52:56.076035676 +0000 UTC m=+905.498738517" watchObservedRunningTime="2026-03-20 15:52:56.078196745 +0000 UTC m=+905.500899596" Mar 20 15:52:58 crc kubenswrapper[4813]: I0320 15:52:58.005039 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-b9bc87685-dshn8" event={"ID":"95615498-4862-4d52-9c91-6f2f4f17d4ba","Type":"ContainerStarted","Data":"f196ca3bccba21301fabc2be9d93d3cdd26392ff47ae3aba9b75ba32a0e6ef16"} Mar 20 15:52:58 crc kubenswrapper[4813]: I0320 15:52:58.005700 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:52:58 crc kubenswrapper[4813]: I0320 15:52:58.055010 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-b9bc87685-dshn8" podStartSLOduration=23.808373462 podStartE2EDuration="26.054984513s" podCreationTimestamp="2026-03-20 15:52:32 +0000 UTC" firstStartedPulling="2026-03-20 15:52:55.602525014 +0000 UTC m=+905.025227855" lastFinishedPulling="2026-03-20 15:52:57.849136075 +0000 UTC m=+907.271838906" observedRunningTime="2026-03-20 15:52:58.054165181 +0000 UTC m=+907.476868022" watchObservedRunningTime="2026-03-20 15:52:58.054984513 +0000 UTC m=+907.477687364" Mar 20 15:53:02 crc kubenswrapper[4813]: I0320 15:53:02.356805 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-b9bc87685-dshn8" Mar 20 15:53:03 crc kubenswrapper[4813]: I0320 
15:53:03.842457 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:53:03 crc kubenswrapper[4813]: I0320 15:53:03.842564 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.668855 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c"] Mar 20 15:53:04 crc kubenswrapper[4813]: E0320 15:53:04.669478 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="extract-utilities" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.669527 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="extract-utilities" Mar 20 15:53:04 crc kubenswrapper[4813]: E0320 15:53:04.669556 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="registry-server" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.669568 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="registry-server" Mar 20 15:53:04 crc kubenswrapper[4813]: E0320 15:53:04.669586 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="extract-content" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.669599 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="extract-content" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.669768 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dab66f52-4455-43e4-a3de-65d2611686d6" containerName="registry-server" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.671017 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.674946 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.682791 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c"] Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.779352 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-bundle\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.779400 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-util\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.779433 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rxr6\" (UniqueName: \"kubernetes.io/projected/0259c0a9-34c2-4000-982b-ae1122f345fc-kube-api-access-8rxr6\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.880569 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-bundle\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.880626 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-util\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.880649 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rxr6\" (UniqueName: \"kubernetes.io/projected/0259c0a9-34c2-4000-982b-ae1122f345fc-kube-api-access-8rxr6\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.881130 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-bundle\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.881191 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-util\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.901478 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rxr6\" (UniqueName: \"kubernetes.io/projected/0259c0a9-34c2-4000-982b-ae1122f345fc-kube-api-access-8rxr6\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:04 crc kubenswrapper[4813]: I0320 15:53:04.990624 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:05 crc kubenswrapper[4813]: I0320 15:53:05.209932 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c"] Mar 20 15:53:06 crc kubenswrapper[4813]: I0320 15:53:06.061959 4813 generic.go:334] "Generic (PLEG): container finished" podID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerID="d6dbb501891d072d609c74acf5926e841c3f101bc07f82c8bdd15206d5f38519" exitCode=0 Mar 20 15:53:06 crc kubenswrapper[4813]: I0320 15:53:06.062007 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" event={"ID":"0259c0a9-34c2-4000-982b-ae1122f345fc","Type":"ContainerDied","Data":"d6dbb501891d072d609c74acf5926e841c3f101bc07f82c8bdd15206d5f38519"} Mar 20 15:53:06 crc kubenswrapper[4813]: I0320 15:53:06.062048 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" event={"ID":"0259c0a9-34c2-4000-982b-ae1122f345fc","Type":"ContainerStarted","Data":"1c72e660417e1ed40a1f242d130dd72d8cd7730066ba885568b806c1941b46f9"} Mar 20 15:53:06 crc kubenswrapper[4813]: I0320 15:53:06.064156 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 15:53:06 crc kubenswrapper[4813]: I0320 15:53:06.088658 4813 scope.go:117] "RemoveContainer" containerID="8670450dbed5ca79cd5d2157d3e60f23acafcacfa9b814f0d6b6cada321bd55d" Mar 20 15:53:07 crc kubenswrapper[4813]: I0320 15:53:07.067787 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gpmgw_a76927f1-3a5b-4e2d-b1c2-b1cb3759b1c1/kube-multus/2.log" Mar 20 15:53:08 crc kubenswrapper[4813]: I0320 15:53:08.074806 4813 generic.go:334] "Generic (PLEG): container finished" podID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerID="0e95b4f7300e458e9926828e90e34fac2a5c6da89a2c75c74a23f84817469cc2" exitCode=0 Mar 20 15:53:08 crc kubenswrapper[4813]: I0320 15:53:08.074863 4813 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" event={"ID":"0259c0a9-34c2-4000-982b-ae1122f345fc","Type":"ContainerDied","Data":"0e95b4f7300e458e9926828e90e34fac2a5c6da89a2c75c74a23f84817469cc2"} Mar 20 15:53:09 crc kubenswrapper[4813]: I0320 15:53:09.089575 4813 generic.go:334] "Generic (PLEG): container finished" podID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerID="8626a9f5b6f1043085b3b4c15b6fea8c144156338644e2abfc165614999daf32" exitCode=0 Mar 20 15:53:09 crc kubenswrapper[4813]: I0320 15:53:09.089625 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" event={"ID":"0259c0a9-34c2-4000-982b-ae1122f345fc","Type":"ContainerDied","Data":"8626a9f5b6f1043085b3b4c15b6fea8c144156338644e2abfc165614999daf32"} Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.347097 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.351005 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-bundle\") pod \"0259c0a9-34c2-4000-982b-ae1122f345fc\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.351064 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-util\") pod \"0259c0a9-34c2-4000-982b-ae1122f345fc\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.351114 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rxr6\" (UniqueName: \"kubernetes.io/projected/0259c0a9-34c2-4000-982b-ae1122f345fc-kube-api-access-8rxr6\") pod \"0259c0a9-34c2-4000-982b-ae1122f345fc\" (UID: \"0259c0a9-34c2-4000-982b-ae1122f345fc\") " Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.351784 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-bundle" (OuterVolumeSpecName: "bundle") pod "0259c0a9-34c2-4000-982b-ae1122f345fc" (UID: "0259c0a9-34c2-4000-982b-ae1122f345fc"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.357609 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0259c0a9-34c2-4000-982b-ae1122f345fc-kube-api-access-8rxr6" (OuterVolumeSpecName: "kube-api-access-8rxr6") pod "0259c0a9-34c2-4000-982b-ae1122f345fc" (UID: "0259c0a9-34c2-4000-982b-ae1122f345fc"). InnerVolumeSpecName "kube-api-access-8rxr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.372342 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-util" (OuterVolumeSpecName: "util") pod "0259c0a9-34c2-4000-982b-ae1122f345fc" (UID: "0259c0a9-34c2-4000-982b-ae1122f345fc"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.452231 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.452260 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0259c0a9-34c2-4000-982b-ae1122f345fc-util\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:10 crc kubenswrapper[4813]: I0320 15:53:10.452271 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rxr6\" (UniqueName: \"kubernetes.io/projected/0259c0a9-34c2-4000-982b-ae1122f345fc-kube-api-access-8rxr6\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:11 crc kubenswrapper[4813]: I0320 15:53:11.109707 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" event={"ID":"0259c0a9-34c2-4000-982b-ae1122f345fc","Type":"ContainerDied","Data":"1c72e660417e1ed40a1f242d130dd72d8cd7730066ba885568b806c1941b46f9"} Mar 20 15:53:11 crc kubenswrapper[4813]: I0320 15:53:11.109764 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c72e660417e1ed40a1f242d130dd72d8cd7730066ba885568b806c1941b46f9" Mar 20 15:53:11 crc kubenswrapper[4813]: I0320 15:53:11.109857 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.493842 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-796d4cfff4-s52xz"] Mar 20 15:53:13 crc kubenswrapper[4813]: E0320 15:53:13.494468 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="pull" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.494513 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="pull" Mar 20 15:53:13 crc kubenswrapper[4813]: E0320 15:53:13.494546 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="util" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.494557 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="util" Mar 20 15:53:13 crc kubenswrapper[4813]: E0320 15:53:13.494577 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="extract" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.494588 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="extract" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.494734 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0259c0a9-34c2-4000-982b-ae1122f345fc" containerName="extract" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.495333 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.503533 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.503769 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.504005 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-wp5rc" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.504895 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-796d4cfff4-s52xz"] Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.593455 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvtvg\" (UniqueName: \"kubernetes.io/projected/6fd819ab-3d0b-427b-b29c-bf57d746a6a3-kube-api-access-cvtvg\") pod \"nmstate-operator-796d4cfff4-s52xz\" (UID: \"6fd819ab-3d0b-427b-b29c-bf57d746a6a3\") " pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.694656 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvtvg\" (UniqueName: \"kubernetes.io/projected/6fd819ab-3d0b-427b-b29c-bf57d746a6a3-kube-api-access-cvtvg\") pod \"nmstate-operator-796d4cfff4-s52xz\" (UID: \"6fd819ab-3d0b-427b-b29c-bf57d746a6a3\") " pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.716769 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvtvg\" (UniqueName: \"kubernetes.io/projected/6fd819ab-3d0b-427b-b29c-bf57d746a6a3-kube-api-access-cvtvg\") pod \"nmstate-operator-796d4cfff4-s52xz\" (UID: \"6fd819ab-3d0b-427b-b29c-bf57d746a6a3\") " pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" Mar 20 15:53:13 crc kubenswrapper[4813]: I0320 15:53:13.816315 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" Mar 20 15:53:14 crc kubenswrapper[4813]: I0320 15:53:14.039762 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-796d4cfff4-s52xz"] Mar 20 15:53:14 crc kubenswrapper[4813]: W0320 15:53:14.061657 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fd819ab_3d0b_427b_b29c_bf57d746a6a3.slice/crio-3ea5d4c06235dfcabcf1d20b3bf25b3597c044dc0fdc93c5e826815414213009 WatchSource:0}: Error finding container 3ea5d4c06235dfcabcf1d20b3bf25b3597c044dc0fdc93c5e826815414213009: Status 404 returned error can't find the container with id 3ea5d4c06235dfcabcf1d20b3bf25b3597c044dc0fdc93c5e826815414213009 Mar 20 15:53:14 crc kubenswrapper[4813]: I0320 15:53:14.126949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" event={"ID":"6fd819ab-3d0b-427b-b29c-bf57d746a6a3","Type":"ContainerStarted","Data":"3ea5d4c06235dfcabcf1d20b3bf25b3597c044dc0fdc93c5e826815414213009"} Mar 20 15:53:17 crc kubenswrapper[4813]: I0320 15:53:17.149393 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" event={"ID":"6fd819ab-3d0b-427b-b29c-bf57d746a6a3","Type":"ContainerStarted","Data":"4c4c065eb0ad262a912c2d1ca0d0e93fe961fe454f79dcd7c52576a336d26095"} Mar 20 15:53:17 crc kubenswrapper[4813]: I0320 15:53:17.165564 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-796d4cfff4-s52xz" podStartSLOduration=1.5990971470000002 podStartE2EDuration="4.165549758s" podCreationTimestamp="2026-03-20 15:53:13 +0000 UTC" firstStartedPulling="2026-03-20 15:53:14.065368151 +0000 UTC m=+923.488070992" lastFinishedPulling="2026-03-20 15:53:16.631820772 +0000 UTC m=+926.054523603" observedRunningTime="2026-03-20 15:53:17.162856615 +0000 UTC m=+926.585559456" watchObservedRunningTime="2026-03-20 15:53:17.165549758 +0000 UTC m=+926.588252599" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.149109 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.150302 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.155693 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-6b685" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.160190 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.164142 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.164831 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.165772 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/980f1609-b4ad-464a-93cd-895066f63c92-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.165814 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsgwb\" (UniqueName: \"kubernetes.io/projected/980f1609-b4ad-464a-93cd-895066f63c92-kube-api-access-fsgwb\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.165875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m48wl\" (UniqueName: \"kubernetes.io/projected/474448b5-47ff-4f1b-a127-f3192ec0a283-kube-api-access-m48wl\") pod \"nmstate-metrics-9b8c8685d-xfs5s\" (UID: \"474448b5-47ff-4f1b-a127-f3192ec0a283\") " pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.169412 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.192716 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.208442 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-52hfr"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.209142 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.266790 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/980f1609-b4ad-464a-93cd-895066f63c92-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.266841 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsgwb\" (UniqueName: \"kubernetes.io/projected/980f1609-b4ad-464a-93cd-895066f63c92-kube-api-access-fsgwb\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.266903 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m48wl\" (UniqueName: \"kubernetes.io/projected/474448b5-47ff-4f1b-a127-f3192ec0a283-kube-api-access-m48wl\") pod \"nmstate-metrics-9b8c8685d-xfs5s\" (UID: \"474448b5-47ff-4f1b-a127-f3192ec0a283\") " pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" Mar 20 15:53:19 crc kubenswrapper[4813]: E0320 15:53:19.267230 4813 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Mar 20 15:53:19 crc kubenswrapper[4813]: E0320 15:53:19.267289 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/980f1609-b4ad-464a-93cd-895066f63c92-tls-key-pair podName:980f1609-b4ad-464a-93cd-895066f63c92 nodeName:}" failed. No retries permitted until 2026-03-20 15:53:19.767274294 +0000 UTC m=+929.189977135 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/980f1609-b4ad-464a-93cd-895066f63c92-tls-key-pair") pod "nmstate-webhook-5f558f5558-xbp9m" (UID: "980f1609-b4ad-464a-93cd-895066f63c92") : secret "openshift-nmstate-webhook" not found Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.307610 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m48wl\" (UniqueName: \"kubernetes.io/projected/474448b5-47ff-4f1b-a127-f3192ec0a283-kube-api-access-m48wl\") pod \"nmstate-metrics-9b8c8685d-xfs5s\" (UID: \"474448b5-47ff-4f1b-a127-f3192ec0a283\") " pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.318305 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsgwb\" (UniqueName: \"kubernetes.io/projected/980f1609-b4ad-464a-93cd-895066f63c92-kube-api-access-fsgwb\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.368458 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-ovs-socket\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.368521 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6b9c\" (UniqueName: \"kubernetes.io/projected/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-kube-api-access-d6b9c\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.368561 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-dbus-socket\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.368715 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-nmstate-lock\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.441123 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.441820 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.461327 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-6wwmh" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.461346 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.461960 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.465983 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470787 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7vpv\" (UniqueName: \"kubernetes.io/projected/56e21629-308a-4212-ac84-81570cb43089-kube-api-access-k7vpv\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470833 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-nmstate-lock\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470903 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-ovs-socket\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470928 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6b9c\" (UniqueName: \"kubernetes.io/projected/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-kube-api-access-d6b9c\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-nmstate-lock\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470960 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e21629-308a-4212-ac84-81570cb43089-plugin-serving-cert\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470984 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-ovs-socket\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " 
pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.470985 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-dbus-socket\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.471113 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/56e21629-308a-4212-ac84-81570cb43089-nginx-conf\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.471219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-dbus-socket\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.488691 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.503141 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6b9c\" (UniqueName: \"kubernetes.io/projected/b8bbc7f2-8378-46a1-9cc1-20c8366b22f4-kube-api-access-d6b9c\") pod \"nmstate-handler-52hfr\" (UID: \"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4\") " pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.526155 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.572102 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e21629-308a-4212-ac84-81570cb43089-plugin-serving-cert\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.572158 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/56e21629-308a-4212-ac84-81570cb43089-nginx-conf\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.572185 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7vpv\" (UniqueName: \"kubernetes.io/projected/56e21629-308a-4212-ac84-81570cb43089-kube-api-access-k7vpv\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.573754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/56e21629-308a-4212-ac84-81570cb43089-nginx-conf\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.577246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/56e21629-308a-4212-ac84-81570cb43089-plugin-serving-cert\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.592126 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7vpv\" (UniqueName: \"kubernetes.io/projected/56e21629-308a-4212-ac84-81570cb43089-kube-api-access-k7vpv\") pod \"nmstate-console-plugin-86f58fcf4-7shs2\" (UID: \"56e21629-308a-4212-ac84-81570cb43089\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.636094 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-cb4f878bb-p9q64"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.638426 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.641051 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-cb4f878bb-p9q64"] Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673190 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-trusted-ca-bundle\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673308 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-oauth-serving-cert\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673343 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-config\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673377 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-serving-cert\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673404 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-service-ca\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673427 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbxdl\" (UniqueName: \"kubernetes.io/projected/04185fdf-5c0c-46c8-8447-bb3225c8409b-kube-api-access-wbxdl\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.673465 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-oauth-config\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.693652 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s"] Mar 20 15:53:19 crc kubenswrapper[4813]: W0320 15:53:19.707162 4813 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod474448b5_47ff_4f1b_a127_f3192ec0a283.slice/crio-6d462c243555ad3fff3fbe3b669018fbe46cceafd4ebe3b3423c2ba5872a8a0b WatchSource:0}: Error finding container 6d462c243555ad3fff3fbe3b669018fbe46cceafd4ebe3b3423c2ba5872a8a0b: Status 404 returned error can't find the container with id 6d462c243555ad3fff3fbe3b669018fbe46cceafd4ebe3b3423c2ba5872a8a0b Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774192 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-serving-cert\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774241 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-service-ca\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774260 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbxdl\" (UniqueName: \"kubernetes.io/projected/04185fdf-5c0c-46c8-8447-bb3225c8409b-kube-api-access-wbxdl\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774294 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-oauth-config\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774321 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-trusted-ca-bundle\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774353 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/980f1609-b4ad-464a-93cd-895066f63c92-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774384 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-oauth-serving-cert\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.774886 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-config\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " 
pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.775288 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-service-ca\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.775288 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-config\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.775346 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-oauth-serving-cert\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.775470 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-trusted-ca-bundle\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.779946 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-serving-cert\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.780718 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-oauth-config\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.780980 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/980f1609-b4ad-464a-93cd-895066f63c92-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-xbp9m\" (UID: \"980f1609-b4ad-464a-93cd-895066f63c92\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.790733 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbxdl\" (UniqueName: \"kubernetes.io/projected/04185fdf-5c0c-46c8-8447-bb3225c8409b-kube-api-access-wbxdl\") pod \"console-cb4f878bb-p9q64\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.857294 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" Mar 20 15:53:19 crc kubenswrapper[4813]: I0320 15:53:19.974800 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.052508 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2"] Mar 20 15:53:20 crc kubenswrapper[4813]: W0320 15:53:20.054775 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56e21629_308a_4212_ac84_81570cb43089.slice/crio-7d9197bb995391f9a6c61f09e215fbdca4a9b95ac71e9fcb1cf8597618c508f4 WatchSource:0}: Error finding container 7d9197bb995391f9a6c61f09e215fbdca4a9b95ac71e9fcb1cf8597618c508f4: Status 404 returned error can't find the container with id 7d9197bb995391f9a6c61f09e215fbdca4a9b95ac71e9fcb1cf8597618c508f4 Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.080715 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.167778 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-52hfr" event={"ID":"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4","Type":"ContainerStarted","Data":"9f315b0c307ddfb296474aaa858f9f726a8bfc5aaae43e1803a35324366b9b20"} Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.168857 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" event={"ID":"474448b5-47ff-4f1b-a127-f3192ec0a283","Type":"ContainerStarted","Data":"6d462c243555ad3fff3fbe3b669018fbe46cceafd4ebe3b3423c2ba5872a8a0b"} Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.169632 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" event={"ID":"56e21629-308a-4212-ac84-81570cb43089","Type":"ContainerStarted","Data":"7d9197bb995391f9a6c61f09e215fbdca4a9b95ac71e9fcb1cf8597618c508f4"} Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.273016 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m"] Mar 20 15:53:20 crc kubenswrapper[4813]: W0320 15:53:20.277610 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod980f1609_b4ad_464a_93cd_895066f63c92.slice/crio-dcf5d883608b607a4f6ddfea5c011a78360d6f7efa59f60c2ab82abce022cd17 WatchSource:0}: Error finding container dcf5d883608b607a4f6ddfea5c011a78360d6f7efa59f60c2ab82abce022cd17: Status 404 returned error can't find the container with id dcf5d883608b607a4f6ddfea5c011a78360d6f7efa59f60c2ab82abce022cd17 Mar 20 15:53:20 crc kubenswrapper[4813]: I0320 15:53:20.468648 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-cb4f878bb-p9q64"] Mar 20 15:53:20 crc kubenswrapper[4813]: W0320 15:53:20.480668 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04185fdf_5c0c_46c8_8447_bb3225c8409b.slice/crio-47ff0156181fd904fba5f860238a69e83b358c03fc21424804e3253cad69c94b WatchSource:0}: Error finding container 47ff0156181fd904fba5f860238a69e83b358c03fc21424804e3253cad69c94b: Status 404 returned error can't find the container with id 47ff0156181fd904fba5f860238a69e83b358c03fc21424804e3253cad69c94b Mar 20 15:53:21 crc kubenswrapper[4813]: I0320 15:53:21.175005 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-cb4f878bb-p9q64" 
event={"ID":"04185fdf-5c0c-46c8-8447-bb3225c8409b","Type":"ContainerStarted","Data":"9086b30e50e46c8e5a3e4999f23138fbee4200a6b043bb62e692f238e2dc4ad5"} Mar 20 15:53:21 crc kubenswrapper[4813]: I0320 15:53:21.175049 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-cb4f878bb-p9q64" event={"ID":"04185fdf-5c0c-46c8-8447-bb3225c8409b","Type":"ContainerStarted","Data":"47ff0156181fd904fba5f860238a69e83b358c03fc21424804e3253cad69c94b"} Mar 20 15:53:21 crc kubenswrapper[4813]: I0320 15:53:21.176677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" event={"ID":"980f1609-b4ad-464a-93cd-895066f63c92","Type":"ContainerStarted","Data":"dcf5d883608b607a4f6ddfea5c011a78360d6f7efa59f60c2ab82abce022cd17"} Mar 20 15:53:21 crc kubenswrapper[4813]: I0320 15:53:21.287439 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-cb4f878bb-p9q64" podStartSLOduration=2.2874234319999998 podStartE2EDuration="2.287423432s" podCreationTimestamp="2026-03-20 15:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:53:21.195747501 +0000 UTC m=+930.618450342" watchObservedRunningTime="2026-03-20 15:53:21.287423432 +0000 UTC m=+930.710126273" Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.203371 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" event={"ID":"474448b5-47ff-4f1b-a127-f3192ec0a283","Type":"ContainerStarted","Data":"f5c5422c14e925706ec12e81ac48875764ebd96e62bb30f45449538284d73609"} Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.204414 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" event={"ID":"980f1609-b4ad-464a-93cd-895066f63c92","Type":"ContainerStarted","Data":"be4c8a3142444edb66ae63bf4be09b189bbf2b873d07587fb2d4d826f37fdf65"} Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.204536 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.205515 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-52hfr" event={"ID":"b8bbc7f2-8378-46a1-9cc1-20c8366b22f4","Type":"ContainerStarted","Data":"8fa3270597b22921251a6e4c7ddde2dadf904fb526222e9c0d27231a0d3f0155"} Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.205679 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.222944 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" podStartSLOduration=2.187240182 podStartE2EDuration="6.222918413s" podCreationTimestamp="2026-03-20 15:53:19 +0000 UTC" firstStartedPulling="2026-03-20 15:53:20.281284294 +0000 UTC m=+929.703987145" lastFinishedPulling="2026-03-20 15:53:24.316962535 +0000 UTC m=+933.739665376" observedRunningTime="2026-03-20 15:53:25.220630601 +0000 UTC m=+934.643333482" watchObservedRunningTime="2026-03-20 15:53:25.222918413 +0000 UTC m=+934.645621284" Mar 20 15:53:25 crc kubenswrapper[4813]: I0320 15:53:25.254072 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-52hfr" 
podStartSLOduration=1.5408842310000002 podStartE2EDuration="6.254045542s" podCreationTimestamp="2026-03-20 15:53:19 +0000 UTC" firstStartedPulling="2026-03-20 15:53:19.566070297 +0000 UTC m=+928.988773138" lastFinishedPulling="2026-03-20 15:53:24.279231598 +0000 UTC m=+933.701934449" observedRunningTime="2026-03-20 15:53:25.243865647 +0000 UTC m=+934.666568518" watchObservedRunningTime="2026-03-20 15:53:25.254045542 +0000 UTC m=+934.676748423" Mar 20 15:53:26 crc kubenswrapper[4813]: I0320 15:53:26.214896 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" event={"ID":"56e21629-308a-4212-ac84-81570cb43089","Type":"ContainerStarted","Data":"ae5999179ef5a0dbc4fcb8781e88b4f1efe7626f3dc7a2d0d073351fd538ed5d"} Mar 20 15:53:26 crc kubenswrapper[4813]: I0320 15:53:26.238054 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-7shs2" podStartSLOduration=2.096409983 podStartE2EDuration="7.238035942s" podCreationTimestamp="2026-03-20 15:53:19 +0000 UTC" firstStartedPulling="2026-03-20 15:53:20.058013376 +0000 UTC m=+929.480716217" lastFinishedPulling="2026-03-20 15:53:25.199639335 +0000 UTC m=+934.622342176" observedRunningTime="2026-03-20 15:53:26.231195987 +0000 UTC m=+935.653898828" watchObservedRunningTime="2026-03-20 15:53:26.238035942 +0000 UTC m=+935.660738783" Mar 20 15:53:27 crc kubenswrapper[4813]: I0320 15:53:27.222938 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" event={"ID":"474448b5-47ff-4f1b-a127-f3192ec0a283","Type":"ContainerStarted","Data":"0309d218ba4c98ade75d626f346d21ccac95ac47193b3fab954bf91b02e9276e"} Mar 20 15:53:29 crc kubenswrapper[4813]: I0320 15:53:29.550393 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-52hfr" Mar 20 15:53:29 crc kubenswrapper[4813]: I0320 15:53:29.573189 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-xfs5s" podStartSLOduration=3.402689013 podStartE2EDuration="10.573172754s" podCreationTimestamp="2026-03-20 15:53:19 +0000 UTC" firstStartedPulling="2026-03-20 15:53:19.70976617 +0000 UTC m=+929.132469011" lastFinishedPulling="2026-03-20 15:53:26.880249901 +0000 UTC m=+936.302952752" observedRunningTime="2026-03-20 15:53:27.244139869 +0000 UTC m=+936.666842720" watchObservedRunningTime="2026-03-20 15:53:29.573172754 +0000 UTC m=+938.995875595" Mar 20 15:53:29 crc kubenswrapper[4813]: I0320 15:53:29.975733 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:29 crc kubenswrapper[4813]: I0320 15:53:29.975840 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:29 crc kubenswrapper[4813]: I0320 15:53:29.982728 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:30 crc kubenswrapper[4813]: I0320 15:53:30.249546 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:53:30 crc kubenswrapper[4813]: I0320 15:53:30.309282 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-gsjbb"] Mar 20 15:53:33 crc kubenswrapper[4813]: I0320 15:53:33.842285 4813 patch_prober.go:28] 
interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:53:33 crc kubenswrapper[4813]: I0320 15:53:33.842839 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:53:33 crc kubenswrapper[4813]: I0320 15:53:33.842928 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:53:33 crc kubenswrapper[4813]: I0320 15:53:33.843907 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c4e8d3a270bd3cde9b5259a1a03876e8e2c58d54ad599c8e659fb3e3d9e094f9"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 15:53:33 crc kubenswrapper[4813]: I0320 15:53:33.844014 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://c4e8d3a270bd3cde9b5259a1a03876e8e2c58d54ad599c8e659fb3e3d9e094f9" gracePeriod=600 Mar 20 15:53:34 crc kubenswrapper[4813]: I0320 15:53:34.269253 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="c4e8d3a270bd3cde9b5259a1a03876e8e2c58d54ad599c8e659fb3e3d9e094f9" exitCode=0 Mar 20 15:53:34 crc kubenswrapper[4813]: I0320 15:53:34.269418 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"c4e8d3a270bd3cde9b5259a1a03876e8e2c58d54ad599c8e659fb3e3d9e094f9"} Mar 20 15:53:34 crc kubenswrapper[4813]: I0320 15:53:34.269697 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"58e6bc2a6d9001d676e2ad6e8e29a8d0759512d8590133534bf74186af5900d4"} Mar 20 15:53:34 crc kubenswrapper[4813]: I0320 15:53:34.269729 4813 scope.go:117] "RemoveContainer" containerID="73a01997359872a7d6c06593a97cbca955cca2ef12575691de07e76008cef282" Mar 20 15:53:40 crc kubenswrapper[4813]: I0320 15:53:40.092225 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f558f5558-xbp9m" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.470779 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk"] Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.472234 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.474137 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.484027 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk"] Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.524020 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtshq\" (UniqueName: \"kubernetes.io/projected/60e1c3ce-b711-4bac-afd3-60804a46154f-kube-api-access-wtshq\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.524074 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-bundle\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.524136 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-util\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.625811 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-bundle\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.625884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-util\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.625955 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtshq\" (UniqueName: \"kubernetes.io/projected/60e1c3ce-b711-4bac-afd3-60804a46154f-kube-api-access-wtshq\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.626344 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-bundle\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.626665 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-util\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.645036 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtshq\" (UniqueName: \"kubernetes.io/projected/60e1c3ce-b711-4bac-afd3-60804a46154f-kube-api-access-wtshq\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:53 crc kubenswrapper[4813]: I0320 15:53:53.794374 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:54 crc kubenswrapper[4813]: I0320 15:53:54.193605 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk"] Mar 20 15:53:54 crc kubenswrapper[4813]: I0320 15:53:54.423019 4813 generic.go:334] "Generic (PLEG): container finished" podID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerID="843eab178107003677ff7b430da4a77dbf6772736528b71abf00fe2bd9e9b95f" exitCode=0 Mar 20 15:53:54 crc kubenswrapper[4813]: I0320 15:53:54.423060 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" event={"ID":"60e1c3ce-b711-4bac-afd3-60804a46154f","Type":"ContainerDied","Data":"843eab178107003677ff7b430da4a77dbf6772736528b71abf00fe2bd9e9b95f"} Mar 20 15:53:54 crc kubenswrapper[4813]: I0320 15:53:54.423084 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" event={"ID":"60e1c3ce-b711-4bac-afd3-60804a46154f","Type":"ContainerStarted","Data":"31db0581bc65108dc09bd09839acd48736a30daa6bcda593993bee11d96631fa"} Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.358697 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-gsjbb" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerName="console" containerID="cri-o://464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38" gracePeriod=15 Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.452221 4813 patch_prober.go:28] interesting pod/console-f9d7485db-gsjbb container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.452328 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-gsjbb" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerName="console" 
probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.833888 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-gsjbb_97d209ef-db8b-4a75-bd06-bf1aea3a81dc/console/0.log" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.833963 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.860827 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-trusted-ca-bundle\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.860881 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-oauth-serving-cert\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.860913 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-oauth-config\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.860970 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-serving-cert\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.860992 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-service-ca\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.861077 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-config\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.861102 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g85s\" (UniqueName: \"kubernetes.io/projected/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-kube-api-access-8g85s\") pod \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\" (UID: \"97d209ef-db8b-4a75-bd06-bf1aea3a81dc\") " Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.861720 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.862826 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-service-ca" (OuterVolumeSpecName: "service-ca") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.863615 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-config" (OuterVolumeSpecName: "console-config") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.863725 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.870722 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.875944 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-kube-api-access-8g85s" (OuterVolumeSpecName: "kube-api-access-8g85s") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "kube-api-access-8g85s". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.876948 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "97d209ef-db8b-4a75-bd06-bf1aea3a81dc" (UID: "97d209ef-db8b-4a75-bd06-bf1aea3a81dc"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962217 4813 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962243 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g85s\" (UniqueName: \"kubernetes.io/projected/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-kube-api-access-8g85s\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962254 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962264 4813 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962272 4813 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-oauth-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962281 4813 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-console-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:55 crc kubenswrapper[4813]: I0320 15:53:55.962289 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97d209ef-db8b-4a75-bd06-bf1aea3a81dc-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.439761 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-gsjbb_97d209ef-db8b-4a75-bd06-bf1aea3a81dc/console/0.log" Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.440158 4813 generic.go:334] "Generic (PLEG): container finished" podID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerID="464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38" exitCode=2 Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.440241 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-gsjbb" Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.440240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gsjbb" event={"ID":"97d209ef-db8b-4a75-bd06-bf1aea3a81dc","Type":"ContainerDied","Data":"464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38"} Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.440384 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gsjbb" event={"ID":"97d209ef-db8b-4a75-bd06-bf1aea3a81dc","Type":"ContainerDied","Data":"970cf1ad540f5bdc5e2594cc352df34c50b92fd323081229a01f2bde0c5cdea8"} Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.440415 4813 scope.go:117] "RemoveContainer" containerID="464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38" Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.443535 4813 generic.go:334] "Generic (PLEG): container finished" podID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerID="8aacfd8e7002b55c2cebca52fc415cbc5ce6e1d8f3aeae37f6e9c7c1d365c025" exitCode=0 Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.443587 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" event={"ID":"60e1c3ce-b711-4bac-afd3-60804a46154f","Type":"ContainerDied","Data":"8aacfd8e7002b55c2cebca52fc415cbc5ce6e1d8f3aeae37f6e9c7c1d365c025"} Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.485307 4813 scope.go:117] "RemoveContainer" containerID="464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38" Mar 20 15:53:56 crc kubenswrapper[4813]: E0320 15:53:56.487050 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38\": container with ID starting with 464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38 not found: ID does not exist" containerID="464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38" Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.487122 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38"} err="failed to get container status \"464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38\": rpc error: code = NotFound desc = could not find container \"464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38\": container with ID starting with 464886cd3493385255b7a1bc00c27796afe97812e44348b5e6047964c1ad5b38 not found: ID does not exist" Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.488909 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-gsjbb"] Mar 20 15:53:56 crc kubenswrapper[4813]: I0320 15:53:56.494586 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-gsjbb"] Mar 20 15:53:57 crc kubenswrapper[4813]: I0320 15:53:57.279881 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" path="/var/lib/kubelet/pods/97d209ef-db8b-4a75-bd06-bf1aea3a81dc/volumes" Mar 20 15:53:57 crc kubenswrapper[4813]: I0320 15:53:57.452129 4813 generic.go:334] "Generic (PLEG): container finished" podID="60e1c3ce-b711-4bac-afd3-60804a46154f" 
containerID="e99d6ea91d188a3ff4e212d8c3478b425ec1b4c3525efd0ad46d45006a8f893e" exitCode=0 Mar 20 15:53:57 crc kubenswrapper[4813]: I0320 15:53:57.452227 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" event={"ID":"60e1c3ce-b711-4bac-afd3-60804a46154f","Type":"ContainerDied","Data":"e99d6ea91d188a3ff4e212d8c3478b425ec1b4c3525efd0ad46d45006a8f893e"} Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.727770 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.801006 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-bundle\") pod \"60e1c3ce-b711-4bac-afd3-60804a46154f\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.801061 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtshq\" (UniqueName: \"kubernetes.io/projected/60e1c3ce-b711-4bac-afd3-60804a46154f-kube-api-access-wtshq\") pod \"60e1c3ce-b711-4bac-afd3-60804a46154f\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.801119 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-util\") pod \"60e1c3ce-b711-4bac-afd3-60804a46154f\" (UID: \"60e1c3ce-b711-4bac-afd3-60804a46154f\") " Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.801977 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-bundle" (OuterVolumeSpecName: "bundle") pod "60e1c3ce-b711-4bac-afd3-60804a46154f" (UID: "60e1c3ce-b711-4bac-afd3-60804a46154f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.806919 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e1c3ce-b711-4bac-afd3-60804a46154f-kube-api-access-wtshq" (OuterVolumeSpecName: "kube-api-access-wtshq") pod "60e1c3ce-b711-4bac-afd3-60804a46154f" (UID: "60e1c3ce-b711-4bac-afd3-60804a46154f"). InnerVolumeSpecName "kube-api-access-wtshq". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.814948 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-util" (OuterVolumeSpecName: "util") pod "60e1c3ce-b711-4bac-afd3-60804a46154f" (UID: "60e1c3ce-b711-4bac-afd3-60804a46154f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.902816 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-util\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.902857 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/60e1c3ce-b711-4bac-afd3-60804a46154f-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:58 crc kubenswrapper[4813]: I0320 15:53:58.902870 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtshq\" (UniqueName: \"kubernetes.io/projected/60e1c3ce-b711-4bac-afd3-60804a46154f-kube-api-access-wtshq\") on node \"crc\" DevicePath \"\"" Mar 20 15:53:59 crc kubenswrapper[4813]: I0320 15:53:59.475531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" event={"ID":"60e1c3ce-b711-4bac-afd3-60804a46154f","Type":"ContainerDied","Data":"31db0581bc65108dc09bd09839acd48736a30daa6bcda593993bee11d96631fa"} Mar 20 15:53:59 crc kubenswrapper[4813]: I0320 15:53:59.475589 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31db0581bc65108dc09bd09839acd48736a30daa6bcda593993bee11d96631fa" Mar 20 15:53:59 crc kubenswrapper[4813]: I0320 15:53:59.475598 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.148523 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567034-tvrfq"] Mar 20 15:54:00 crc kubenswrapper[4813]: E0320 15:54:00.149050 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerName="console" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.149150 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerName="console" Mar 20 15:54:00 crc kubenswrapper[4813]: E0320 15:54:00.149179 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="util" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.149190 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="util" Mar 20 15:54:00 crc kubenswrapper[4813]: E0320 15:54:00.149203 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="extract" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.149212 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="extract" Mar 20 15:54:00 crc kubenswrapper[4813]: E0320 15:54:00.149224 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="pull" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.149233 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="pull" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.149413 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="97d209ef-db8b-4a75-bd06-bf1aea3a81dc" containerName="console" Mar 20 15:54:00 crc 
kubenswrapper[4813]: I0320 15:54:00.149435 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e1c3ce-b711-4bac-afd3-60804a46154f" containerName="extract" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.150046 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.152727 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.153846 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.155088 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.166415 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567034-tvrfq"] Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.221428 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz8j2\" (UniqueName: \"kubernetes.io/projected/bde898e2-d1f5-45dc-ba44-1cc4620152dc-kube-api-access-mz8j2\") pod \"auto-csr-approver-29567034-tvrfq\" (UID: \"bde898e2-d1f5-45dc-ba44-1cc4620152dc\") " pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.322342 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz8j2\" (UniqueName: \"kubernetes.io/projected/bde898e2-d1f5-45dc-ba44-1cc4620152dc-kube-api-access-mz8j2\") pod \"auto-csr-approver-29567034-tvrfq\" (UID: \"bde898e2-d1f5-45dc-ba44-1cc4620152dc\") " pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.496635 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz8j2\" (UniqueName: \"kubernetes.io/projected/bde898e2-d1f5-45dc-ba44-1cc4620152dc-kube-api-access-mz8j2\") pod \"auto-csr-approver-29567034-tvrfq\" (UID: \"bde898e2-d1f5-45dc-ba44-1cc4620152dc\") " pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.768184 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:00 crc kubenswrapper[4813]: I0320 15:54:00.993406 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567034-tvrfq"] Mar 20 15:54:01 crc kubenswrapper[4813]: W0320 15:54:01.000541 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbde898e2_d1f5_45dc_ba44_1cc4620152dc.slice/crio-f7b5d01d6e3e596cad4418c412503114d123d3f96a8b36adb70fde2f58754a51 WatchSource:0}: Error finding container f7b5d01d6e3e596cad4418c412503114d123d3f96a8b36adb70fde2f58754a51: Status 404 returned error can't find the container with id f7b5d01d6e3e596cad4418c412503114d123d3f96a8b36adb70fde2f58754a51 Mar 20 15:54:01 crc kubenswrapper[4813]: I0320 15:54:01.499210 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" event={"ID":"bde898e2-d1f5-45dc-ba44-1cc4620152dc","Type":"ContainerStarted","Data":"f7b5d01d6e3e596cad4418c412503114d123d3f96a8b36adb70fde2f58754a51"} Mar 20 15:54:03 crc kubenswrapper[4813]: I0320 15:54:03.514913 4813 generic.go:334] "Generic (PLEG): container finished" podID="bde898e2-d1f5-45dc-ba44-1cc4620152dc" containerID="550cb2b0850678e616e2e311c696d7d9b678adc681667f5955489f0caf52fa5c" exitCode=0 Mar 20 15:54:03 crc kubenswrapper[4813]: I0320 15:54:03.515656 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" event={"ID":"bde898e2-d1f5-45dc-ba44-1cc4620152dc","Type":"ContainerDied","Data":"550cb2b0850678e616e2e311c696d7d9b678adc681667f5955489f0caf52fa5c"} Mar 20 15:54:04 crc kubenswrapper[4813]: I0320 15:54:04.803525 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:04 crc kubenswrapper[4813]: I0320 15:54:04.923677 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mz8j2\" (UniqueName: \"kubernetes.io/projected/bde898e2-d1f5-45dc-ba44-1cc4620152dc-kube-api-access-mz8j2\") pod \"bde898e2-d1f5-45dc-ba44-1cc4620152dc\" (UID: \"bde898e2-d1f5-45dc-ba44-1cc4620152dc\") " Mar 20 15:54:04 crc kubenswrapper[4813]: I0320 15:54:04.932808 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bde898e2-d1f5-45dc-ba44-1cc4620152dc-kube-api-access-mz8j2" (OuterVolumeSpecName: "kube-api-access-mz8j2") pod "bde898e2-d1f5-45dc-ba44-1cc4620152dc" (UID: "bde898e2-d1f5-45dc-ba44-1cc4620152dc"). InnerVolumeSpecName "kube-api-access-mz8j2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:54:05 crc kubenswrapper[4813]: I0320 15:54:05.025513 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mz8j2\" (UniqueName: \"kubernetes.io/projected/bde898e2-d1f5-45dc-ba44-1cc4620152dc-kube-api-access-mz8j2\") on node \"crc\" DevicePath \"\"" Mar 20 15:54:05 crc kubenswrapper[4813]: I0320 15:54:05.529241 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" event={"ID":"bde898e2-d1f5-45dc-ba44-1cc4620152dc","Type":"ContainerDied","Data":"f7b5d01d6e3e596cad4418c412503114d123d3f96a8b36adb70fde2f58754a51"} Mar 20 15:54:05 crc kubenswrapper[4813]: I0320 15:54:05.529571 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7b5d01d6e3e596cad4418c412503114d123d3f96a8b36adb70fde2f58754a51" Mar 20 15:54:05 crc kubenswrapper[4813]: I0320 15:54:05.529633 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567034-tvrfq" Mar 20 15:54:05 crc kubenswrapper[4813]: I0320 15:54:05.863234 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567028-m4wlb"] Mar 20 15:54:05 crc kubenswrapper[4813]: I0320 15:54:05.866968 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567028-m4wlb"] Mar 20 15:54:07 crc kubenswrapper[4813]: I0320 15:54:07.274670 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43e20dfc-363f-43d2-9f6b-86ba5209e70b" path="/var/lib/kubelet/pods/43e20dfc-363f-43d2-9f6b-86ba5209e70b/volumes" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.759493 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-bfc448998-db8w6"] Mar 20 15:54:08 crc kubenswrapper[4813]: E0320 15:54:08.760071 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bde898e2-d1f5-45dc-ba44-1cc4620152dc" containerName="oc" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.760086 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bde898e2-d1f5-45dc-ba44-1cc4620152dc" containerName="oc" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.760211 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bde898e2-d1f5-45dc-ba44-1cc4620152dc" containerName="oc" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.760689 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.762619 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.762914 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.763318 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.764126 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.764848 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-zz2jf" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.777584 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-bfc448998-db8w6"] Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.903553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-apiservice-cert\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.903620 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-webhook-cert\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:08 crc kubenswrapper[4813]: I0320 15:54:08.903660 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww859\" (UniqueName: \"kubernetes.io/projected/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-kube-api-access-ww859\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.004712 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww859\" (UniqueName: \"kubernetes.io/projected/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-kube-api-access-ww859\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.004823 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-apiservice-cert\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.004853 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-webhook-cert\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.009924 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-apiservice-cert\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.011568 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-webhook-cert\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.025187 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww859\" (UniqueName: \"kubernetes.io/projected/28b6ff98-fd32-44bd-8ab0-9395fa76e6f8-kube-api-access-ww859\") pod \"metallb-operator-controller-manager-bfc448998-db8w6\" (UID: \"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8\") " pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.076421 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.093707 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s"] Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.094559 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.097850 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.098060 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-5g6mf" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.098217 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.108610 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s"] Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.207043 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5673dcee-fdfe-4f75-9dca-b35a64c13bea-apiservice-cert\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.207372 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5673dcee-fdfe-4f75-9dca-b35a64c13bea-webhook-cert\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.207403 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnhd8\" (UniqueName: \"kubernetes.io/projected/5673dcee-fdfe-4f75-9dca-b35a64c13bea-kube-api-access-nnhd8\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.308282 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5673dcee-fdfe-4f75-9dca-b35a64c13bea-apiservice-cert\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.308375 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5673dcee-fdfe-4f75-9dca-b35a64c13bea-webhook-cert\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.308414 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnhd8\" (UniqueName: \"kubernetes.io/projected/5673dcee-fdfe-4f75-9dca-b35a64c13bea-kube-api-access-nnhd8\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 
15:54:09.312216 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5673dcee-fdfe-4f75-9dca-b35a64c13bea-apiservice-cert\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.313976 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5673dcee-fdfe-4f75-9dca-b35a64c13bea-webhook-cert\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.328326 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnhd8\" (UniqueName: \"kubernetes.io/projected/5673dcee-fdfe-4f75-9dca-b35a64c13bea-kube-api-access-nnhd8\") pod \"metallb-operator-webhook-server-795784bfcc-vcd2s\" (UID: \"5673dcee-fdfe-4f75-9dca-b35a64c13bea\") " pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.444089 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.576957 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-bfc448998-db8w6"] Mar 20 15:54:09 crc kubenswrapper[4813]: W0320 15:54:09.584688 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28b6ff98_fd32_44bd_8ab0_9395fa76e6f8.slice/crio-bef290e1fa0ab626d80eac13b6d8226723a2b8d3948f650545aec92fd0967d13 WatchSource:0}: Error finding container bef290e1fa0ab626d80eac13b6d8226723a2b8d3948f650545aec92fd0967d13: Status 404 returned error can't find the container with id bef290e1fa0ab626d80eac13b6d8226723a2b8d3948f650545aec92fd0967d13 Mar 20 15:54:09 crc kubenswrapper[4813]: I0320 15:54:09.893405 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s"] Mar 20 15:54:09 crc kubenswrapper[4813]: W0320 15:54:09.896325 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5673dcee_fdfe_4f75_9dca_b35a64c13bea.slice/crio-24d5f58ea1eba8668d1af4795059184e3b5d8ef6b6a239dc184b0e6ed64ea7d2 WatchSource:0}: Error finding container 24d5f58ea1eba8668d1af4795059184e3b5d8ef6b6a239dc184b0e6ed64ea7d2: Status 404 returned error can't find the container with id 24d5f58ea1eba8668d1af4795059184e3b5d8ef6b6a239dc184b0e6ed64ea7d2 Mar 20 15:54:10 crc kubenswrapper[4813]: I0320 15:54:10.560110 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" event={"ID":"5673dcee-fdfe-4f75-9dca-b35a64c13bea","Type":"ContainerStarted","Data":"24d5f58ea1eba8668d1af4795059184e3b5d8ef6b6a239dc184b0e6ed64ea7d2"} Mar 20 15:54:10 crc kubenswrapper[4813]: I0320 15:54:10.561162 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" 
event={"ID":"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8","Type":"ContainerStarted","Data":"bef290e1fa0ab626d80eac13b6d8226723a2b8d3948f650545aec92fd0967d13"} Mar 20 15:54:15 crc kubenswrapper[4813]: I0320 15:54:15.602786 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" event={"ID":"5673dcee-fdfe-4f75-9dca-b35a64c13bea","Type":"ContainerStarted","Data":"b41f25fe224fb7e90d2fb8f11c6012af2c817d2530fbddc17f5db0282293be86"} Mar 20 15:54:15 crc kubenswrapper[4813]: I0320 15:54:15.603329 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:15 crc kubenswrapper[4813]: I0320 15:54:15.604214 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" event={"ID":"28b6ff98-fd32-44bd-8ab0-9395fa76e6f8","Type":"ContainerStarted","Data":"49647e85b7956421a2900a6aa22609b1e5a870fe72aa6ee7fda09220ac7a2e21"} Mar 20 15:54:15 crc kubenswrapper[4813]: I0320 15:54:15.604506 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:15 crc kubenswrapper[4813]: I0320 15:54:15.622766 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" podStartSLOduration=1.715851899 podStartE2EDuration="6.622744424s" podCreationTimestamp="2026-03-20 15:54:09 +0000 UTC" firstStartedPulling="2026-03-20 15:54:09.900357062 +0000 UTC m=+979.323059903" lastFinishedPulling="2026-03-20 15:54:14.807249587 +0000 UTC m=+984.229952428" observedRunningTime="2026-03-20 15:54:15.619885518 +0000 UTC m=+985.042588369" watchObservedRunningTime="2026-03-20 15:54:15.622744424 +0000 UTC m=+985.045447275" Mar 20 15:54:15 crc kubenswrapper[4813]: I0320 15:54:15.645208 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" podStartSLOduration=2.438729118 podStartE2EDuration="7.645187097s" podCreationTimestamp="2026-03-20 15:54:08 +0000 UTC" firstStartedPulling="2026-03-20 15:54:09.586875185 +0000 UTC m=+979.009578026" lastFinishedPulling="2026-03-20 15:54:14.793333164 +0000 UTC m=+984.216036005" observedRunningTime="2026-03-20 15:54:15.642114224 +0000 UTC m=+985.064817065" watchObservedRunningTime="2026-03-20 15:54:15.645187097 +0000 UTC m=+985.067889938" Mar 20 15:54:29 crc kubenswrapper[4813]: I0320 15:54:29.450314 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-795784bfcc-vcd2s" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.103128 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8h86t"] Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.104656 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.115943 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8h86t"] Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.250866 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-utilities\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.250931 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-catalog-content\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.250956 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsg9f\" (UniqueName: \"kubernetes.io/projected/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-kube-api-access-rsg9f\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.352011 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-utilities\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.352088 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-catalog-content\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.352111 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsg9f\" (UniqueName: \"kubernetes.io/projected/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-kube-api-access-rsg9f\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.352551 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-utilities\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.352683 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-catalog-content\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.375725 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rsg9f\" (UniqueName: \"kubernetes.io/projected/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-kube-api-access-rsg9f\") pod \"certified-operators-8h86t\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.423287 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.673502 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8h86t"] Mar 20 15:54:35 crc kubenswrapper[4813]: I0320 15:54:35.745257 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerStarted","Data":"f6e6e78706c0350876090165c0a979b5d1e267e719d6c20013a60d84acbc162f"} Mar 20 15:54:36 crc kubenswrapper[4813]: I0320 15:54:36.752774 4813 generic.go:334] "Generic (PLEG): container finished" podID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerID="78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a" exitCode=0 Mar 20 15:54:36 crc kubenswrapper[4813]: I0320 15:54:36.752863 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerDied","Data":"78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a"} Mar 20 15:54:37 crc kubenswrapper[4813]: I0320 15:54:37.760393 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerStarted","Data":"f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660"} Mar 20 15:54:38 crc kubenswrapper[4813]: I0320 15:54:38.769160 4813 generic.go:334] "Generic (PLEG): container finished" podID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerID="f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660" exitCode=0 Mar 20 15:54:38 crc kubenswrapper[4813]: I0320 15:54:38.769213 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerDied","Data":"f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660"} Mar 20 15:54:39 crc kubenswrapper[4813]: I0320 15:54:39.777462 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerStarted","Data":"37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a"} Mar 20 15:54:39 crc kubenswrapper[4813]: I0320 15:54:39.804029 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8h86t" podStartSLOduration=2.399577712 podStartE2EDuration="4.804011253s" podCreationTimestamp="2026-03-20 15:54:35 +0000 UTC" firstStartedPulling="2026-03-20 15:54:36.754114601 +0000 UTC m=+1006.176817452" lastFinishedPulling="2026-03-20 15:54:39.158548112 +0000 UTC m=+1008.581250993" observedRunningTime="2026-03-20 15:54:39.799015869 +0000 UTC m=+1009.221718730" watchObservedRunningTime="2026-03-20 15:54:39.804011253 +0000 UTC m=+1009.226714084" Mar 20 15:54:45 crc kubenswrapper[4813]: I0320 15:54:45.424136 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:45 crc kubenswrapper[4813]: I0320 15:54:45.425012 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:45 crc kubenswrapper[4813]: I0320 15:54:45.473670 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:45 crc kubenswrapper[4813]: I0320 15:54:45.895439 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:45 crc kubenswrapper[4813]: I0320 15:54:45.947001 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8h86t"] Mar 20 15:54:47 crc kubenswrapper[4813]: I0320 15:54:47.829067 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8h86t" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="registry-server" containerID="cri-o://37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a" gracePeriod=2 Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.212834 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.333775 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-utilities\") pod \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.333843 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsg9f\" (UniqueName: \"kubernetes.io/projected/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-kube-api-access-rsg9f\") pod \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.334243 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-catalog-content\") pod \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\" (UID: \"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb\") " Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.334942 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-utilities" (OuterVolumeSpecName: "utilities") pod "5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" (UID: "5edfd173-ddad-49ee-9eaa-3da9c7ee44cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.341592 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-kube-api-access-rsg9f" (OuterVolumeSpecName: "kube-api-access-rsg9f") pod "5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" (UID: "5edfd173-ddad-49ee-9eaa-3da9c7ee44cb"). InnerVolumeSpecName "kube-api-access-rsg9f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.398555 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" (UID: "5edfd173-ddad-49ee-9eaa-3da9c7ee44cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.435294 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.435328 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.435343 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsg9f\" (UniqueName: \"kubernetes.io/projected/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb-kube-api-access-rsg9f\") on node \"crc\" DevicePath \"\"" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.836845 4813 generic.go:334] "Generic (PLEG): container finished" podID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerID="37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a" exitCode=0 Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.836898 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8h86t" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.836902 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerDied","Data":"37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a"} Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.837021 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8h86t" event={"ID":"5edfd173-ddad-49ee-9eaa-3da9c7ee44cb","Type":"ContainerDied","Data":"f6e6e78706c0350876090165c0a979b5d1e267e719d6c20013a60d84acbc162f"} Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.837047 4813 scope.go:117] "RemoveContainer" containerID="37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.853007 4813 scope.go:117] "RemoveContainer" containerID="f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.872120 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8h86t"] Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.876829 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8h86t"] Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.880304 4813 scope.go:117] "RemoveContainer" containerID="78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.902290 4813 scope.go:117] "RemoveContainer" containerID="37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a" Mar 20 15:54:48 crc kubenswrapper[4813]: E0320 15:54:48.902795 4813 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a\": container with ID starting with 37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a not found: ID does not exist" containerID="37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.902828 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a"} err="failed to get container status \"37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a\": rpc error: code = NotFound desc = could not find container \"37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a\": container with ID starting with 37763b6daf78bb4ec62daaa5ea6d2ba7f5cf4cc3fee913ea8bf38cfd9140118a not found: ID does not exist" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.902850 4813 scope.go:117] "RemoveContainer" containerID="f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660" Mar 20 15:54:48 crc kubenswrapper[4813]: E0320 15:54:48.903087 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660\": container with ID starting with f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660 not found: ID does not exist" containerID="f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.903111 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660"} err="failed to get container status \"f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660\": rpc error: code = NotFound desc = could not find container \"f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660\": container with ID starting with f1141069deaa915ce03d60279e92d122199e4ac644d7d7df64c23b0d5fffb660 not found: ID does not exist" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.903129 4813 scope.go:117] "RemoveContainer" containerID="78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a" Mar 20 15:54:48 crc kubenswrapper[4813]: E0320 15:54:48.903473 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a\": container with ID starting with 78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a not found: ID does not exist" containerID="78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a" Mar 20 15:54:48 crc kubenswrapper[4813]: I0320 15:54:48.904151 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a"} err="failed to get container status \"78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a\": rpc error: code = NotFound desc = could not find container \"78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a\": container with ID starting with 78e4d9c8dc185b5c5d50417e12e47c74c12403944c34390b91fc7627b895e69a not found: ID does not exist" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.080292 4813 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-bfc448998-db8w6" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.273999 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" path="/var/lib/kubelet/pods/5edfd173-ddad-49ee-9eaa-3da9c7ee44cb/volumes" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.321055 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h5fcq"] Mar 20 15:54:49 crc kubenswrapper[4813]: E0320 15:54:49.321458 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="extract-content" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.321564 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="extract-content" Mar 20 15:54:49 crc kubenswrapper[4813]: E0320 15:54:49.321632 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="registry-server" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.321690 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="registry-server" Mar 20 15:54:49 crc kubenswrapper[4813]: E0320 15:54:49.321747 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="extract-utilities" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.321804 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="extract-utilities" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.321969 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5edfd173-ddad-49ee-9eaa-3da9c7ee44cb" containerName="registry-server" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.328215 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.347859 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp7dq\" (UniqueName: \"kubernetes.io/projected/5556f718-e276-4dfe-86e9-3ed527251e5c-kube-api-access-wp7dq\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.348001 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-catalog-content\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.348106 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-utilities\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.366384 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5fcq"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.448903 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-catalog-content\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.448976 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-utilities\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.449041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp7dq\" (UniqueName: \"kubernetes.io/projected/5556f718-e276-4dfe-86e9-3ed527251e5c-kube-api-access-wp7dq\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.449555 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-catalog-content\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.449683 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-utilities\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.473721 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wp7dq\" (UniqueName: \"kubernetes.io/projected/5556f718-e276-4dfe-86e9-3ed527251e5c-kube-api-access-wp7dq\") pod \"community-operators-h5fcq\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.656876 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.748747 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-fbdp4"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.758335 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.759570 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.760850 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.761607 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.761852 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.762213 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-fzz5p" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.762398 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.765870 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.876012 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-skmm6"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.888375 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-skmm6" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.889944 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-7bb4cc7c98-f45rt"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.891952 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.898084 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.898309 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6fw7x" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.898406 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.898527 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.901253 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.905689 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-7bb4cc7c98-f45rt"] Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956514 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-conf\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956564 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956584 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-sockets\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956600 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6hm9\" (UniqueName: \"kubernetes.io/projected/c9ca7072-6004-41c6-8090-d5bd3369994b-kube-api-access-m6hm9\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956616 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b9924c18-4463-4445-a18f-5bd9e6ec1334-metallb-excludel2\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956660 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-smpbn\" (UniqueName: \"kubernetes.io/projected/b9924c18-4463-4445-a18f-5bd9e6ec1334-kube-api-access-smpbn\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956674 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9sbq\" (UniqueName: \"kubernetes.io/projected/5d96ab75-0ca7-47bc-b85d-b39649c76561-kube-api-access-d9sbq\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956693 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-metrics-certs\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956710 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-metrics-certs\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956740 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mngtg\" (UniqueName: \"kubernetes.io/projected/090f54f0-8cbc-4ecf-b792-c160a26595ff-kube-api-access-mngtg\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956757 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-startup\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956771 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d96ab75-0ca7-47bc-b85d-b39649c76561-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956798 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-reloader\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics-certs\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:49 crc kubenswrapper[4813]: I0320 15:54:49.956838 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-cert\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060062 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-metrics-certs\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060115 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-metrics-certs\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060149 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mngtg\" (UniqueName: \"kubernetes.io/projected/090f54f0-8cbc-4ecf-b792-c160a26595ff-kube-api-access-mngtg\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060167 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d96ab75-0ca7-47bc-b85d-b39649c76561-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060183 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-startup\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060209 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-reloader\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060232 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics-certs\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060253 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-cert\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060283 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-conf\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 
20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060305 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060329 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-sockets\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060346 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6hm9\" (UniqueName: \"kubernetes.io/projected/c9ca7072-6004-41c6-8090-d5bd3369994b-kube-api-access-m6hm9\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060359 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b9924c18-4463-4445-a18f-5bd9e6ec1334-metallb-excludel2\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060384 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060402 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smpbn\" (UniqueName: \"kubernetes.io/projected/b9924c18-4463-4445-a18f-5bd9e6ec1334-kube-api-access-smpbn\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.060419 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9sbq\" (UniqueName: \"kubernetes.io/projected/5d96ab75-0ca7-47bc-b85d-b39649c76561-kube-api-access-d9sbq\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.061059 4813 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.061104 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-metrics-certs podName:b9924c18-4463-4445-a18f-5bd9e6ec1334 nodeName:}" failed. No retries permitted until 2026-03-20 15:54:50.561090945 +0000 UTC m=+1019.983793786 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-metrics-certs") pod "speaker-skmm6" (UID: "b9924c18-4463-4445-a18f-5bd9e6ec1334") : secret "speaker-certs-secret" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.061250 4813 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.061272 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-metrics-certs podName:c9ca7072-6004-41c6-8090-d5bd3369994b nodeName:}" failed. No retries permitted until 2026-03-20 15:54:50.56126376 +0000 UTC m=+1019.983966601 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-metrics-certs") pod "controller-7bb4cc7c98-f45rt" (UID: "c9ca7072-6004-41c6-8090-d5bd3369994b") : secret "controller-certs-secret" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.061417 4813 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.061438 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5d96ab75-0ca7-47bc-b85d-b39649c76561-cert podName:5d96ab75-0ca7-47bc-b85d-b39649c76561 nodeName:}" failed. No retries permitted until 2026-03-20 15:54:50.561430885 +0000 UTC m=+1019.984133726 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5d96ab75-0ca7-47bc-b85d-b39649c76561-cert") pod "frr-k8s-webhook-server-bcc4b6f68-fxssk" (UID: "5d96ab75-0ca7-47bc-b85d-b39649c76561") : secret "frr-k8s-webhook-server-cert" not found Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.062254 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-startup\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.062463 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-reloader\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.062528 4813 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.062551 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics-certs podName:090f54f0-8cbc-4ecf-b792-c160a26595ff nodeName:}" failed. No retries permitted until 2026-03-20 15:54:50.562543174 +0000 UTC m=+1019.985246015 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics-certs") pod "frr-k8s-fbdp4" (UID: "090f54f0-8cbc-4ecf-b792-c160a26595ff") : secret "frr-k8s-certs-secret" not found Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.064053 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-conf\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.064147 4813 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.064182 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist podName:b9924c18-4463-4445-a18f-5bd9e6ec1334 nodeName:}" failed. No retries permitted until 2026-03-20 15:54:50.564169248 +0000 UTC m=+1019.986872089 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist") pod "speaker-skmm6" (UID: "b9924c18-4463-4445-a18f-5bd9e6ec1334") : secret "metallb-memberlist" not found Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.064339 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-frr-sockets\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.064521 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.065168 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b9924c18-4463-4445-a18f-5bd9e6ec1334-metallb-excludel2\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.078683 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-cert\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.088176 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smpbn\" (UniqueName: \"kubernetes.io/projected/b9924c18-4463-4445-a18f-5bd9e6ec1334-kube-api-access-smpbn\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.091790 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6hm9\" (UniqueName: \"kubernetes.io/projected/c9ca7072-6004-41c6-8090-d5bd3369994b-kube-api-access-m6hm9\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " 
pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.095007 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mngtg\" (UniqueName: \"kubernetes.io/projected/090f54f0-8cbc-4ecf-b792-c160a26595ff-kube-api-access-mngtg\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.095254 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9sbq\" (UniqueName: \"kubernetes.io/projected/5d96ab75-0ca7-47bc-b85d-b39649c76561-kube-api-access-d9sbq\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.291995 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5fcq"] Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.566161 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics-certs\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.566493 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.566553 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-metrics-certs\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.566582 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-metrics-certs\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.566618 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d96ab75-0ca7-47bc-b85d-b39649c76561-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.566737 4813 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Mar 20 15:54:50 crc kubenswrapper[4813]: E0320 15:54:50.566846 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist podName:b9924c18-4463-4445-a18f-5bd9e6ec1334 nodeName:}" failed. No retries permitted until 2026-03-20 15:54:51.566819885 +0000 UTC m=+1020.989522816 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist") pod "speaker-skmm6" (UID: "b9924c18-4463-4445-a18f-5bd9e6ec1334") : secret "metallb-memberlist" not found Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.572014 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-metrics-certs\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.572056 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d96ab75-0ca7-47bc-b85d-b39649c76561-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-fxssk\" (UID: \"5d96ab75-0ca7-47bc-b85d-b39649c76561\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.572071 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9ca7072-6004-41c6-8090-d5bd3369994b-metrics-certs\") pod \"controller-7bb4cc7c98-f45rt\" (UID: \"c9ca7072-6004-41c6-8090-d5bd3369994b\") " pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.572236 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090f54f0-8cbc-4ecf-b792-c160a26595ff-metrics-certs\") pod \"frr-k8s-fbdp4\" (UID: \"090f54f0-8cbc-4ecf-b792-c160a26595ff\") " pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.678715 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.685952 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.851065 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.872138 4813 generic.go:334] "Generic (PLEG): container finished" podID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerID="e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101" exitCode=0 Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.872235 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5fcq" event={"ID":"5556f718-e276-4dfe-86e9-3ed527251e5c","Type":"ContainerDied","Data":"e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101"} Mar 20 15:54:50 crc kubenswrapper[4813]: I0320 15:54:50.872464 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5fcq" event={"ID":"5556f718-e276-4dfe-86e9-3ed527251e5c","Type":"ContainerStarted","Data":"7ffdd757d05215acdd50743f13332361931c2961d422675f84f25356f1e4912e"} Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.280946 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk"] Mar 20 15:54:51 crc kubenswrapper[4813]: W0320 15:54:51.286929 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d96ab75_0ca7_47bc_b85d_b39649c76561.slice/crio-d51e3d60577ed20758f6c1f19e014bd3fe7cf62cc34c192b06089e9c8af9fef4 WatchSource:0}: Error finding container d51e3d60577ed20758f6c1f19e014bd3fe7cf62cc34c192b06089e9c8af9fef4: Status 404 returned error can't find the container with id d51e3d60577ed20758f6c1f19e014bd3fe7cf62cc34c192b06089e9c8af9fef4 Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.365067 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-7bb4cc7c98-f45rt"] Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.578707 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:51 crc kubenswrapper[4813]: E0320 15:54:51.578827 4813 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Mar 20 15:54:51 crc kubenswrapper[4813]: E0320 15:54:51.578881 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist podName:b9924c18-4463-4445-a18f-5bd9e6ec1334 nodeName:}" failed. No retries permitted until 2026-03-20 15:54:53.57886497 +0000 UTC m=+1023.001567811 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist") pod "speaker-skmm6" (UID: "b9924c18-4463-4445-a18f-5bd9e6ec1334") : secret "metallb-memberlist" not found Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.883079 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-7bb4cc7c98-f45rt" event={"ID":"c9ca7072-6004-41c6-8090-d5bd3369994b","Type":"ContainerStarted","Data":"198cf98cc8a91085a17815380eac9b7609f4be687cbbfcf7e532c2340f07206a"} Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.883350 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-7bb4cc7c98-f45rt" event={"ID":"c9ca7072-6004-41c6-8090-d5bd3369994b","Type":"ContainerStarted","Data":"39055fc6df793b02d2213ec8296da4d0fd8e2748d87b9e35bb1b704851f99aaf"} Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.888373 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"5f32618edfa18e2b67797ab88323bc320e6092c37a622dd1728c81bd9b7c9f3e"} Mar 20 15:54:51 crc kubenswrapper[4813]: I0320 15:54:51.890365 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" event={"ID":"5d96ab75-0ca7-47bc-b85d-b39649c76561","Type":"ContainerStarted","Data":"d51e3d60577ed20758f6c1f19e014bd3fe7cf62cc34c192b06089e9c8af9fef4"} Mar 20 15:54:52 crc kubenswrapper[4813]: I0320 15:54:52.897389 4813 generic.go:334] "Generic (PLEG): container finished" podID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerID="aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f" exitCode=0 Mar 20 15:54:52 crc kubenswrapper[4813]: I0320 15:54:52.897454 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5fcq" event={"ID":"5556f718-e276-4dfe-86e9-3ed527251e5c","Type":"ContainerDied","Data":"aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f"} Mar 20 15:54:52 crc kubenswrapper[4813]: I0320 15:54:52.901312 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-7bb4cc7c98-f45rt" event={"ID":"c9ca7072-6004-41c6-8090-d5bd3369994b","Type":"ContainerStarted","Data":"49b1bc716358bee5f5fbf4edd86213af29dfeb96fd4b48eedaf3739e407f1d6c"} Mar 20 15:54:52 crc kubenswrapper[4813]: I0320 15:54:52.901665 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.599586 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.615652 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b9924c18-4463-4445-a18f-5bd9e6ec1334-memberlist\") pod \"speaker-skmm6\" (UID: \"b9924c18-4463-4445-a18f-5bd9e6ec1334\") " pod="metallb-system/speaker-skmm6" Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.837784 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6fw7x" Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.845139 4813 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-skmm6" Mar 20 15:54:53 crc kubenswrapper[4813]: W0320 15:54:53.874519 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9924c18_4463_4445_a18f_5bd9e6ec1334.slice/crio-63f93d29d596cc10833e6d1b11389159d629cecbec40335baf8b405cead0d5cb WatchSource:0}: Error finding container 63f93d29d596cc10833e6d1b11389159d629cecbec40335baf8b405cead0d5cb: Status 404 returned error can't find the container with id 63f93d29d596cc10833e6d1b11389159d629cecbec40335baf8b405cead0d5cb Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.907049 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-skmm6" event={"ID":"b9924c18-4463-4445-a18f-5bd9e6ec1334","Type":"ContainerStarted","Data":"63f93d29d596cc10833e6d1b11389159d629cecbec40335baf8b405cead0d5cb"} Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.909092 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5fcq" event={"ID":"5556f718-e276-4dfe-86e9-3ed527251e5c","Type":"ContainerStarted","Data":"c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c"} Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.927952 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h5fcq" podStartSLOduration=2.219876571 podStartE2EDuration="4.927932004s" podCreationTimestamp="2026-03-20 15:54:49 +0000 UTC" firstStartedPulling="2026-03-20 15:54:50.874513937 +0000 UTC m=+1020.297216788" lastFinishedPulling="2026-03-20 15:54:53.58256938 +0000 UTC m=+1023.005272221" observedRunningTime="2026-03-20 15:54:53.926189187 +0000 UTC m=+1023.348892028" watchObservedRunningTime="2026-03-20 15:54:53.927932004 +0000 UTC m=+1023.350634845" Mar 20 15:54:53 crc kubenswrapper[4813]: I0320 15:54:53.928810 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-7bb4cc7c98-f45rt" podStartSLOduration=4.928805937 podStartE2EDuration="4.928805937s" podCreationTimestamp="2026-03-20 15:54:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:54:52.952204024 +0000 UTC m=+1022.374906865" watchObservedRunningTime="2026-03-20 15:54:53.928805937 +0000 UTC m=+1023.351508778" Mar 20 15:54:54 crc kubenswrapper[4813]: I0320 15:54:54.922259 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-skmm6" event={"ID":"b9924c18-4463-4445-a18f-5bd9e6ec1334","Type":"ContainerStarted","Data":"c6faed78a080aa4881c8cc08aa9bacbe49e5c87b7c4ec76311b6314cf00b5af9"} Mar 20 15:54:54 crc kubenswrapper[4813]: I0320 15:54:54.922300 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-skmm6" event={"ID":"b9924c18-4463-4445-a18f-5bd9e6ec1334","Type":"ContainerStarted","Data":"44f4306bc8434af57934e60e5bf06a25970da119e1e617288e59015a15546c7d"} Mar 20 15:54:54 crc kubenswrapper[4813]: I0320 15:54:54.983797 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-skmm6" podStartSLOduration=5.983781925 podStartE2EDuration="5.983781925s" podCreationTimestamp="2026-03-20 15:54:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:54:54.977858006 +0000 UTC m=+1024.400560847" 
watchObservedRunningTime="2026-03-20 15:54:54.983781925 +0000 UTC m=+1024.406484766" Mar 20 15:54:55 crc kubenswrapper[4813]: I0320 15:54:55.928596 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-skmm6" Mar 20 15:54:59 crc kubenswrapper[4813]: I0320 15:54:59.658157 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:59 crc kubenswrapper[4813]: I0320 15:54:59.660169 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:59 crc kubenswrapper[4813]: I0320 15:54:59.734862 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:54:59 crc kubenswrapper[4813]: I0320 15:54:59.959055 4813 generic.go:334] "Generic (PLEG): container finished" podID="090f54f0-8cbc-4ecf-b792-c160a26595ff" containerID="e234c330fdcbad2b1f12e8bbf7c6d0a4d669ffbda4940e14c51e0a41d2616f75" exitCode=0 Mar 20 15:54:59 crc kubenswrapper[4813]: I0320 15:54:59.959148 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerDied","Data":"e234c330fdcbad2b1f12e8bbf7c6d0a4d669ffbda4940e14c51e0a41d2616f75"} Mar 20 15:54:59 crc kubenswrapper[4813]: I0320 15:54:59.963679 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" event={"ID":"5d96ab75-0ca7-47bc-b85d-b39649c76561","Type":"ContainerStarted","Data":"7a96b108ef0c37caee34b7b076ab5d1df655e91dd0e291e4760c20e2d0e2b6ec"} Mar 20 15:55:00 crc kubenswrapper[4813]: I0320 15:55:00.003016 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" podStartSLOduration=3.059531236 podStartE2EDuration="11.002988314s" podCreationTimestamp="2026-03-20 15:54:49 +0000 UTC" firstStartedPulling="2026-03-20 15:54:51.29061068 +0000 UTC m=+1020.713313521" lastFinishedPulling="2026-03-20 15:54:59.234067758 +0000 UTC m=+1028.656770599" observedRunningTime="2026-03-20 15:55:00.002393008 +0000 UTC m=+1029.425095859" watchObservedRunningTime="2026-03-20 15:55:00.002988314 +0000 UTC m=+1029.425691195" Mar 20 15:55:00 crc kubenswrapper[4813]: I0320 15:55:00.025734 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:55:00 crc kubenswrapper[4813]: I0320 15:55:00.071460 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5fcq"] Mar 20 15:55:00 crc kubenswrapper[4813]: I0320 15:55:00.686644 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:55:00 crc kubenswrapper[4813]: I0320 15:55:00.973019 4813 generic.go:334] "Generic (PLEG): container finished" podID="090f54f0-8cbc-4ecf-b792-c160a26595ff" containerID="b10ee27bc2e4874c14dcda9b581461a0f8308d8f59e6699c69125f7a7c68aa6e" exitCode=0 Mar 20 15:55:00 crc kubenswrapper[4813]: I0320 15:55:00.973179 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerDied","Data":"b10ee27bc2e4874c14dcda9b581461a0f8308d8f59e6699c69125f7a7c68aa6e"} Mar 20 15:55:01 crc kubenswrapper[4813]: I0320 15:55:01.982602 4813 generic.go:334] 
"Generic (PLEG): container finished" podID="090f54f0-8cbc-4ecf-b792-c160a26595ff" containerID="82cebf228f7acaadcbed78c8c2a7293047321212e22d6da80e76e48e5267e0bb" exitCode=0 Mar 20 15:55:01 crc kubenswrapper[4813]: I0320 15:55:01.982889 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerDied","Data":"82cebf228f7acaadcbed78c8c2a7293047321212e22d6da80e76e48e5267e0bb"} Mar 20 15:55:01 crc kubenswrapper[4813]: I0320 15:55:01.983979 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h5fcq" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="registry-server" containerID="cri-o://c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c" gracePeriod=2 Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.811300 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.937653 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-utilities\") pod \"5556f718-e276-4dfe-86e9-3ed527251e5c\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.937719 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp7dq\" (UniqueName: \"kubernetes.io/projected/5556f718-e276-4dfe-86e9-3ed527251e5c-kube-api-access-wp7dq\") pod \"5556f718-e276-4dfe-86e9-3ed527251e5c\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.937756 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-catalog-content\") pod \"5556f718-e276-4dfe-86e9-3ed527251e5c\" (UID: \"5556f718-e276-4dfe-86e9-3ed527251e5c\") " Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.938613 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-utilities" (OuterVolumeSpecName: "utilities") pod "5556f718-e276-4dfe-86e9-3ed527251e5c" (UID: "5556f718-e276-4dfe-86e9-3ed527251e5c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.949371 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5556f718-e276-4dfe-86e9-3ed527251e5c-kube-api-access-wp7dq" (OuterVolumeSpecName: "kube-api-access-wp7dq") pod "5556f718-e276-4dfe-86e9-3ed527251e5c" (UID: "5556f718-e276-4dfe-86e9-3ed527251e5c"). InnerVolumeSpecName "kube-api-access-wp7dq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.996148 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"7a17efc4e733327f93dede8632b4e4ae7436ccbe8745b5180f0ec45721007c8a"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.996188 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"fd263e1061ae087b96b28e9f40e114e46fc02971555667d21c4f16579303158d"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.996200 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"0ac96ef9718a785f4ae00bddcd893162b3db42eab53a7601402b32a731f27920"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.996210 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"8739a023e107813f2c9a6212afca0930961d6b5639afdbea15cb46835097e7cd"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.996220 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"5e3dbd205762495f7332a28632edf44c2983333a82be8b8074f073b357e1ba32"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.998231 4813 generic.go:334] "Generic (PLEG): container finished" podID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerID="c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c" exitCode=0 Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.998282 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h5fcq" Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.998267 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5fcq" event={"ID":"5556f718-e276-4dfe-86e9-3ed527251e5c","Type":"ContainerDied","Data":"c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.998413 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5fcq" event={"ID":"5556f718-e276-4dfe-86e9-3ed527251e5c","Type":"ContainerDied","Data":"7ffdd757d05215acdd50743f13332361931c2961d422675f84f25356f1e4912e"} Mar 20 15:55:02 crc kubenswrapper[4813]: I0320 15:55:02.998437 4813 scope.go:117] "RemoveContainer" containerID="c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.021541 4813 scope.go:117] "RemoveContainer" containerID="aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.038932 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.038963 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp7dq\" (UniqueName: \"kubernetes.io/projected/5556f718-e276-4dfe-86e9-3ed527251e5c-kube-api-access-wp7dq\") on node \"crc\" DevicePath \"\"" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.082324 4813 scope.go:117] "RemoveContainer" containerID="e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.106678 4813 scope.go:117] "RemoveContainer" containerID="c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c" Mar 20 15:55:03 crc kubenswrapper[4813]: E0320 15:55:03.107061 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c\": container with ID starting with c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c not found: ID does not exist" containerID="c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.107090 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c"} err="failed to get container status \"c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c\": rpc error: code = NotFound desc = could not find container \"c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c\": container with ID starting with c51f19e8de9dbb35f64e0a6f3e5ca2a0e23a2050a7f7ad1f2cce37f5feea529c not found: ID does not exist" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.107109 4813 scope.go:117] "RemoveContainer" containerID="aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f" Mar 20 15:55:03 crc kubenswrapper[4813]: E0320 15:55:03.107365 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f\": container with ID starting with 
aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f not found: ID does not exist" containerID="aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.107383 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f"} err="failed to get container status \"aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f\": rpc error: code = NotFound desc = could not find container \"aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f\": container with ID starting with aead963be0bfcab5f3e31a22b9305ea0e879e3dff16c426a2c711fe69adde83f not found: ID does not exist" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.107395 4813 scope.go:117] "RemoveContainer" containerID="e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101" Mar 20 15:55:03 crc kubenswrapper[4813]: E0320 15:55:03.107725 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101\": container with ID starting with e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101 not found: ID does not exist" containerID="e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.107749 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101"} err="failed to get container status \"e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101\": rpc error: code = NotFound desc = could not find container \"e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101\": container with ID starting with e7a459e1aa73622290074eb76f3b4d1bf3d32ae1f8c35b79c4d63029c4a50101 not found: ID does not exist" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.711125 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5556f718-e276-4dfe-86e9-3ed527251e5c" (UID: "5556f718-e276-4dfe-86e9-3ed527251e5c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.749714 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5556f718-e276-4dfe-86e9-3ed527251e5c-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.935768 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5fcq"] Mar 20 15:55:03 crc kubenswrapper[4813]: I0320 15:55:03.948074 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h5fcq"] Mar 20 15:55:04 crc kubenswrapper[4813]: I0320 15:55:04.029353 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-fbdp4" event={"ID":"090f54f0-8cbc-4ecf-b792-c160a26595ff","Type":"ContainerStarted","Data":"e1e77ef9b50b4d3c3050d924db62f30e44a5a3e932b9a5db7f4fc84eb8ba4abf"} Mar 20 15:55:04 crc kubenswrapper[4813]: I0320 15:55:04.030331 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:55:05 crc kubenswrapper[4813]: I0320 15:55:05.278640 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" path="/var/lib/kubelet/pods/5556f718-e276-4dfe-86e9-3ed527251e5c/volumes" Mar 20 15:55:05 crc kubenswrapper[4813]: I0320 15:55:05.678996 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:55:05 crc kubenswrapper[4813]: I0320 15:55:05.735162 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:55:05 crc kubenswrapper[4813]: I0320 15:55:05.762153 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-fbdp4" podStartSLOduration=8.469468258 podStartE2EDuration="16.762138183s" podCreationTimestamp="2026-03-20 15:54:49 +0000 UTC" firstStartedPulling="2026-03-20 15:54:50.92005147 +0000 UTC m=+1020.342754311" lastFinishedPulling="2026-03-20 15:54:59.212721395 +0000 UTC m=+1028.635424236" observedRunningTime="2026-03-20 15:55:04.077856349 +0000 UTC m=+1033.500559190" watchObservedRunningTime="2026-03-20 15:55:05.762138183 +0000 UTC m=+1035.184841024" Mar 20 15:55:06 crc kubenswrapper[4813]: I0320 15:55:06.234900 4813 scope.go:117] "RemoveContainer" containerID="5728b1c29d28538b8c53ebdf94db21b86e32012c9f9955d5384578134aa71543" Mar 20 15:55:10 crc kubenswrapper[4813]: I0320 15:55:10.690781 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-fxssk" Mar 20 15:55:10 crc kubenswrapper[4813]: I0320 15:55:10.856456 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-7bb4cc7c98-f45rt" Mar 20 15:55:13 crc kubenswrapper[4813]: I0320 15:55:13.849519 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-skmm6" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.431620 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9"] Mar 20 15:55:15 crc kubenswrapper[4813]: E0320 15:55:15.431957 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="extract-content" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 
15:55:15.431974 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="extract-content" Mar 20 15:55:15 crc kubenswrapper[4813]: E0320 15:55:15.431987 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="registry-server" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.431996 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="registry-server" Mar 20 15:55:15 crc kubenswrapper[4813]: E0320 15:55:15.432018 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="extract-utilities" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.432027 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="extract-utilities" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.432171 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5556f718-e276-4dfe-86e9-3ed527251e5c" containerName="registry-server" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.433277 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.439409 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9"] Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.440851 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.603098 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.603166 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf6pb\" (UniqueName: \"kubernetes.io/projected/665d8eab-08d7-4b69-b20c-75c8151bb3b5-kube-api-access-bf6pb\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.603390 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.704614 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: 
\"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.704711 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.704781 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf6pb\" (UniqueName: \"kubernetes.io/projected/665d8eab-08d7-4b69-b20c-75c8151bb3b5-kube-api-access-bf6pb\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.705227 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.705335 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.740900 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf6pb\" (UniqueName: \"kubernetes.io/projected/665d8eab-08d7-4b69-b20c-75c8151bb3b5-kube-api-access-bf6pb\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.758053 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:15 crc kubenswrapper[4813]: I0320 15:55:15.959214 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9"] Mar 20 15:55:15 crc kubenswrapper[4813]: W0320 15:55:15.966454 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod665d8eab_08d7_4b69_b20c_75c8151bb3b5.slice/crio-7c3d16f96d60a89886a3ccc205bc2a8dd4b29295bb475d5c87b0c3aab6a795fe WatchSource:0}: Error finding container 7c3d16f96d60a89886a3ccc205bc2a8dd4b29295bb475d5c87b0c3aab6a795fe: Status 404 returned error can't find the container with id 7c3d16f96d60a89886a3ccc205bc2a8dd4b29295bb475d5c87b0c3aab6a795fe Mar 20 15:55:16 crc kubenswrapper[4813]: I0320 15:55:16.152440 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" event={"ID":"665d8eab-08d7-4b69-b20c-75c8151bb3b5","Type":"ContainerStarted","Data":"950c777b3286f74cacf2f0ad7dcc7946a2a9883e68161f200bd63243f018cf2d"} Mar 20 15:55:16 crc kubenswrapper[4813]: I0320 15:55:16.152509 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" event={"ID":"665d8eab-08d7-4b69-b20c-75c8151bb3b5","Type":"ContainerStarted","Data":"7c3d16f96d60a89886a3ccc205bc2a8dd4b29295bb475d5c87b0c3aab6a795fe"} Mar 20 15:55:17 crc kubenswrapper[4813]: I0320 15:55:17.163276 4813 generic.go:334] "Generic (PLEG): container finished" podID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerID="950c777b3286f74cacf2f0ad7dcc7946a2a9883e68161f200bd63243f018cf2d" exitCode=0 Mar 20 15:55:17 crc kubenswrapper[4813]: I0320 15:55:17.163336 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" event={"ID":"665d8eab-08d7-4b69-b20c-75c8151bb3b5","Type":"ContainerDied","Data":"950c777b3286f74cacf2f0ad7dcc7946a2a9883e68161f200bd63243f018cf2d"} Mar 20 15:55:20 crc kubenswrapper[4813]: I0320 15:55:20.682094 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-fbdp4" Mar 20 15:55:22 crc kubenswrapper[4813]: I0320 15:55:22.199381 4813 generic.go:334] "Generic (PLEG): container finished" podID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerID="fc5232e9e31175cce3f04d6e15a2030480dd4d3e06e3bdfcd48b2e24c34046e0" exitCode=0 Mar 20 15:55:22 crc kubenswrapper[4813]: I0320 15:55:22.199430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" event={"ID":"665d8eab-08d7-4b69-b20c-75c8151bb3b5","Type":"ContainerDied","Data":"fc5232e9e31175cce3f04d6e15a2030480dd4d3e06e3bdfcd48b2e24c34046e0"} Mar 20 15:55:23 crc kubenswrapper[4813]: I0320 15:55:23.209025 4813 generic.go:334] "Generic (PLEG): container finished" podID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerID="0faeac6e35c8df646787ce422e181c635bbd75cffcf79924090dab171c78e02b" exitCode=0 Mar 20 15:55:23 crc kubenswrapper[4813]: I0320 15:55:23.209398 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" 
event={"ID":"665d8eab-08d7-4b69-b20c-75c8151bb3b5","Type":"ContainerDied","Data":"0faeac6e35c8df646787ce422e181c635bbd75cffcf79924090dab171c78e02b"} Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.512436 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.627609 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf6pb\" (UniqueName: \"kubernetes.io/projected/665d8eab-08d7-4b69-b20c-75c8151bb3b5-kube-api-access-bf6pb\") pod \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.627656 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-util\") pod \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.627814 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-bundle\") pod \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\" (UID: \"665d8eab-08d7-4b69-b20c-75c8151bb3b5\") " Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.630276 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-bundle" (OuterVolumeSpecName: "bundle") pod "665d8eab-08d7-4b69-b20c-75c8151bb3b5" (UID: "665d8eab-08d7-4b69-b20c-75c8151bb3b5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.633141 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/665d8eab-08d7-4b69-b20c-75c8151bb3b5-kube-api-access-bf6pb" (OuterVolumeSpecName: "kube-api-access-bf6pb") pod "665d8eab-08d7-4b69-b20c-75c8151bb3b5" (UID: "665d8eab-08d7-4b69-b20c-75c8151bb3b5"). InnerVolumeSpecName "kube-api-access-bf6pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.638815 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-util" (OuterVolumeSpecName: "util") pod "665d8eab-08d7-4b69-b20c-75c8151bb3b5" (UID: "665d8eab-08d7-4b69-b20c-75c8151bb3b5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.728938 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.729265 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf6pb\" (UniqueName: \"kubernetes.io/projected/665d8eab-08d7-4b69-b20c-75c8151bb3b5-kube-api-access-bf6pb\") on node \"crc\" DevicePath \"\"" Mar 20 15:55:24 crc kubenswrapper[4813]: I0320 15:55:24.729276 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/665d8eab-08d7-4b69-b20c-75c8151bb3b5-util\") on node \"crc\" DevicePath \"\"" Mar 20 15:55:25 crc kubenswrapper[4813]: I0320 15:55:25.262721 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" event={"ID":"665d8eab-08d7-4b69-b20c-75c8151bb3b5","Type":"ContainerDied","Data":"7c3d16f96d60a89886a3ccc205bc2a8dd4b29295bb475d5c87b0c3aab6a795fe"} Mar 20 15:55:25 crc kubenswrapper[4813]: I0320 15:55:25.262772 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c3d16f96d60a89886a3ccc205bc2a8dd4b29295bb475d5c87b0c3aab6a795fe" Mar 20 15:55:25 crc kubenswrapper[4813]: I0320 15:55:25.262853 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.429246 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc"] Mar 20 15:55:29 crc kubenswrapper[4813]: E0320 15:55:29.429837 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="util" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.429853 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="util" Mar 20 15:55:29 crc kubenswrapper[4813]: E0320 15:55:29.429869 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="pull" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.429877 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="pull" Mar 20 15:55:29 crc kubenswrapper[4813]: E0320 15:55:29.429893 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="extract" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.429903 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="extract" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.430044 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="665d8eab-08d7-4b69-b20c-75c8151bb3b5" containerName="extract" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.430606 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.432975 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-xxsxv" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.433040 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.433725 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.457799 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc"] Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.598786 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87g28\" (UniqueName: \"kubernetes.io/projected/0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab-kube-api-access-87g28\") pod \"cert-manager-operator-controller-manager-66c8bdd694-54nlc\" (UID: \"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.598830 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-54nlc\" (UID: \"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.700193 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87g28\" (UniqueName: \"kubernetes.io/projected/0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab-kube-api-access-87g28\") pod \"cert-manager-operator-controller-manager-66c8bdd694-54nlc\" (UID: \"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.700254 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-54nlc\" (UID: \"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.700942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab-tmp\") pod \"cert-manager-operator-controller-manager-66c8bdd694-54nlc\" (UID: \"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.718667 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87g28\" (UniqueName: \"kubernetes.io/projected/0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab-kube-api-access-87g28\") pod \"cert-manager-operator-controller-manager-66c8bdd694-54nlc\" (UID: \"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:29 crc kubenswrapper[4813]: I0320 15:55:29.749317 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" Mar 20 15:55:30 crc kubenswrapper[4813]: I0320 15:55:30.208165 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc"] Mar 20 15:55:30 crc kubenswrapper[4813]: I0320 15:55:30.307019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" event={"ID":"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab","Type":"ContainerStarted","Data":"ed34e809d6959d534a5caff2441b9f35ad801ee50b9000a7dc4a8be0d16f6363"} Mar 20 15:55:34 crc kubenswrapper[4813]: I0320 15:55:34.346759 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" event={"ID":"0bd5f72b-6f44-4aeb-b0c7-e8bc3d2821ab","Type":"ContainerStarted","Data":"c91a659760eaa60e6919fd65a9254a2772ab589b8734a2687cc53806fdc954cd"} Mar 20 15:55:34 crc kubenswrapper[4813]: I0320 15:55:34.372383 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-66c8bdd694-54nlc" podStartSLOduration=1.737370713 podStartE2EDuration="5.372361436s" podCreationTimestamp="2026-03-20 15:55:29 +0000 UTC" firstStartedPulling="2026-03-20 15:55:30.214427542 +0000 UTC m=+1059.637130423" lastFinishedPulling="2026-03-20 15:55:33.849418265 +0000 UTC m=+1063.272121146" observedRunningTime="2026-03-20 15:55:34.367691181 +0000 UTC m=+1063.790394032" watchObservedRunningTime="2026-03-20 15:55:34.372361436 +0000 UTC m=+1063.795064297" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.458568 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-n56d9"] Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.459622 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.461386 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-782cb" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.461718 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.463612 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.474765 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-n56d9"] Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.604658 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf4h9\" (UniqueName: \"kubernetes.io/projected/d9ff7c1b-30f8-49bf-b626-3911d0943736-kube-api-access-hf4h9\") pod \"cert-manager-webhook-6888856db4-n56d9\" (UID: \"d9ff7c1b-30f8-49bf-b626-3911d0943736\") " pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.604730 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9ff7c1b-30f8-49bf-b626-3911d0943736-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-n56d9\" (UID: \"d9ff7c1b-30f8-49bf-b626-3911d0943736\") " pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.706527 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf4h9\" (UniqueName: \"kubernetes.io/projected/d9ff7c1b-30f8-49bf-b626-3911d0943736-kube-api-access-hf4h9\") pod \"cert-manager-webhook-6888856db4-n56d9\" (UID: \"d9ff7c1b-30f8-49bf-b626-3911d0943736\") " pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.706581 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9ff7c1b-30f8-49bf-b626-3911d0943736-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-n56d9\" (UID: \"d9ff7c1b-30f8-49bf-b626-3911d0943736\") " pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.735304 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9ff7c1b-30f8-49bf-b626-3911d0943736-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-n56d9\" (UID: \"d9ff7c1b-30f8-49bf-b626-3911d0943736\") " pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.738183 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf4h9\" (UniqueName: \"kubernetes.io/projected/d9ff7c1b-30f8-49bf-b626-3911d0943736-kube-api-access-hf4h9\") pod \"cert-manager-webhook-6888856db4-n56d9\" (UID: \"d9ff7c1b-30f8-49bf-b626-3911d0943736\") " pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:37 crc kubenswrapper[4813]: I0320 15:55:37.776247 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:38 crc kubenswrapper[4813]: I0320 15:55:38.314427 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-n56d9"] Mar 20 15:55:38 crc kubenswrapper[4813]: I0320 15:55:38.373011 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" event={"ID":"d9ff7c1b-30f8-49bf-b626-3911d0943736","Type":"ContainerStarted","Data":"bab67aed2f75cc5e6dc224b419254e14164fb4b622e043096c01ebd4008c76ac"} Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.256780 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-st7jn"] Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.258232 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.263179 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-7bscq" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.274582 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-st7jn"] Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.340804 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3abcfa4b-b69a-41ea-83e2-589697723bef-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-st7jn\" (UID: \"3abcfa4b-b69a-41ea-83e2-589697723bef\") " pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.340966 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkb5k\" (UniqueName: \"kubernetes.io/projected/3abcfa4b-b69a-41ea-83e2-589697723bef-kube-api-access-qkb5k\") pod \"cert-manager-cainjector-5545bd876-st7jn\" (UID: \"3abcfa4b-b69a-41ea-83e2-589697723bef\") " pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.441902 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkb5k\" (UniqueName: \"kubernetes.io/projected/3abcfa4b-b69a-41ea-83e2-589697723bef-kube-api-access-qkb5k\") pod \"cert-manager-cainjector-5545bd876-st7jn\" (UID: \"3abcfa4b-b69a-41ea-83e2-589697723bef\") " pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.441982 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3abcfa4b-b69a-41ea-83e2-589697723bef-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-st7jn\" (UID: \"3abcfa4b-b69a-41ea-83e2-589697723bef\") " pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.464902 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3abcfa4b-b69a-41ea-83e2-589697723bef-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-st7jn\" (UID: \"3abcfa4b-b69a-41ea-83e2-589697723bef\") " pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.476557 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qkb5k\" (UniqueName: \"kubernetes.io/projected/3abcfa4b-b69a-41ea-83e2-589697723bef-kube-api-access-qkb5k\") pod \"cert-manager-cainjector-5545bd876-st7jn\" (UID: \"3abcfa4b-b69a-41ea-83e2-589697723bef\") " pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:40 crc kubenswrapper[4813]: I0320 15:55:40.590774 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" Mar 20 15:55:41 crc kubenswrapper[4813]: I0320 15:55:41.036027 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-st7jn"] Mar 20 15:55:41 crc kubenswrapper[4813]: I0320 15:55:41.402024 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" event={"ID":"3abcfa4b-b69a-41ea-83e2-589697723bef","Type":"ContainerStarted","Data":"f42b983c7cf3f8819b6e2cfa87913e4a0f9bb654d74cb2b0446355baa6441ce8"} Mar 20 15:55:44 crc kubenswrapper[4813]: I0320 15:55:44.421451 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" event={"ID":"d9ff7c1b-30f8-49bf-b626-3911d0943736","Type":"ContainerStarted","Data":"f6e22a2f12d72d0afad054f1d832f16dcc9bb7ec1da2dc65cc4de9172bbcf1d4"} Mar 20 15:55:44 crc kubenswrapper[4813]: I0320 15:55:44.422035 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:44 crc kubenswrapper[4813]: I0320 15:55:44.423604 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" event={"ID":"3abcfa4b-b69a-41ea-83e2-589697723bef","Type":"ContainerStarted","Data":"fb8ef1d334ba84d7db0d7969b38af4c83213850970eeb8cf4ac335209e48c562"} Mar 20 15:55:44 crc kubenswrapper[4813]: I0320 15:55:44.454588 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-5545bd876-st7jn" podStartSLOduration=1.5905379320000002 podStartE2EDuration="4.454565653s" podCreationTimestamp="2026-03-20 15:55:40 +0000 UTC" firstStartedPulling="2026-03-20 15:55:41.055012192 +0000 UTC m=+1070.477715033" lastFinishedPulling="2026-03-20 15:55:43.919039923 +0000 UTC m=+1073.341742754" observedRunningTime="2026-03-20 15:55:44.452718823 +0000 UTC m=+1073.875421664" watchObservedRunningTime="2026-03-20 15:55:44.454565653 +0000 UTC m=+1073.877268504" Mar 20 15:55:44 crc kubenswrapper[4813]: I0320 15:55:44.455783 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" podStartSLOduration=2.375087724 podStartE2EDuration="7.455775625s" podCreationTimestamp="2026-03-20 15:55:37 +0000 UTC" firstStartedPulling="2026-03-20 15:55:38.326068817 +0000 UTC m=+1067.748771658" lastFinishedPulling="2026-03-20 15:55:43.406756718 +0000 UTC m=+1072.829459559" observedRunningTime="2026-03-20 15:55:44.441700397 +0000 UTC m=+1073.864403238" watchObservedRunningTime="2026-03-20 15:55:44.455775625 +0000 UTC m=+1073.878478486" Mar 20 15:55:48 crc kubenswrapper[4813]: I0320 15:55:48.929791 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-545d4d4674-7rt5m"] Mar 20 15:55:48 crc kubenswrapper[4813]: I0320 15:55:48.930976 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:48 crc kubenswrapper[4813]: I0320 15:55:48.933241 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-rvs9h" Mar 20 15:55:48 crc kubenswrapper[4813]: I0320 15:55:48.936361 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-7rt5m"] Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.057646 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1de6b68c-a264-4e69-8f1d-5427473d0b31-bound-sa-token\") pod \"cert-manager-545d4d4674-7rt5m\" (UID: \"1de6b68c-a264-4e69-8f1d-5427473d0b31\") " pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.058005 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8svq\" (UniqueName: \"kubernetes.io/projected/1de6b68c-a264-4e69-8f1d-5427473d0b31-kube-api-access-w8svq\") pod \"cert-manager-545d4d4674-7rt5m\" (UID: \"1de6b68c-a264-4e69-8f1d-5427473d0b31\") " pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.158948 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8svq\" (UniqueName: \"kubernetes.io/projected/1de6b68c-a264-4e69-8f1d-5427473d0b31-kube-api-access-w8svq\") pod \"cert-manager-545d4d4674-7rt5m\" (UID: \"1de6b68c-a264-4e69-8f1d-5427473d0b31\") " pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.159041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1de6b68c-a264-4e69-8f1d-5427473d0b31-bound-sa-token\") pod \"cert-manager-545d4d4674-7rt5m\" (UID: \"1de6b68c-a264-4e69-8f1d-5427473d0b31\") " pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.191289 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1de6b68c-a264-4e69-8f1d-5427473d0b31-bound-sa-token\") pod \"cert-manager-545d4d4674-7rt5m\" (UID: \"1de6b68c-a264-4e69-8f1d-5427473d0b31\") " pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.192049 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8svq\" (UniqueName: \"kubernetes.io/projected/1de6b68c-a264-4e69-8f1d-5427473d0b31-kube-api-access-w8svq\") pod \"cert-manager-545d4d4674-7rt5m\" (UID: \"1de6b68c-a264-4e69-8f1d-5427473d0b31\") " pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.247103 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-7rt5m" Mar 20 15:55:49 crc kubenswrapper[4813]: I0320 15:55:49.700133 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-7rt5m"] Mar 20 15:55:50 crc kubenswrapper[4813]: I0320 15:55:50.461924 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-7rt5m" event={"ID":"1de6b68c-a264-4e69-8f1d-5427473d0b31","Type":"ContainerStarted","Data":"0410b63749d6e26e7727e164c9f7753458851dc38eb856c4f1b4bcbb3731ccb6"} Mar 20 15:55:50 crc kubenswrapper[4813]: I0320 15:55:50.461974 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-7rt5m" event={"ID":"1de6b68c-a264-4e69-8f1d-5427473d0b31","Type":"ContainerStarted","Data":"bd71f37f210c51ceae7a996a3719b19853f224ff96ff582314dd803655bbddcd"} Mar 20 15:55:50 crc kubenswrapper[4813]: I0320 15:55:50.481164 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-545d4d4674-7rt5m" podStartSLOduration=2.481145322 podStartE2EDuration="2.481145322s" podCreationTimestamp="2026-03-20 15:55:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:55:50.475674395 +0000 UTC m=+1079.898377246" watchObservedRunningTime="2026-03-20 15:55:50.481145322 +0000 UTC m=+1079.903848163" Mar 20 15:55:52 crc kubenswrapper[4813]: I0320 15:55:52.778880 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-6888856db4-n56d9" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.743121 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ktx2h"] Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.744544 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.746842 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.754909 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-tmkkx" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.754974 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.768446 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ktx2h"] Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.864361 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mplgb\" (UniqueName: \"kubernetes.io/projected/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6-kube-api-access-mplgb\") pod \"openstack-operator-index-ktx2h\" (UID: \"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6\") " pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.966375 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mplgb\" (UniqueName: \"kubernetes.io/projected/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6-kube-api-access-mplgb\") pod \"openstack-operator-index-ktx2h\" (UID: \"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6\") " pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:55:55 crc kubenswrapper[4813]: I0320 15:55:55.994160 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mplgb\" (UniqueName: \"kubernetes.io/projected/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6-kube-api-access-mplgb\") pod \"openstack-operator-index-ktx2h\" (UID: \"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6\") " pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:55:56 crc kubenswrapper[4813]: I0320 15:55:56.064102 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:55:56 crc kubenswrapper[4813]: I0320 15:55:56.481361 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ktx2h"] Mar 20 15:55:56 crc kubenswrapper[4813]: W0320 15:55:56.484696 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48a041b6_2ebc_45e0_a4a1_21aea7f2a7a6.slice/crio-f562668229b159ec6859262c6aa70dc50c48f3c297d34cca44fb705a8c6fc2fd WatchSource:0}: Error finding container f562668229b159ec6859262c6aa70dc50c48f3c297d34cca44fb705a8c6fc2fd: Status 404 returned error can't find the container with id f562668229b159ec6859262c6aa70dc50c48f3c297d34cca44fb705a8c6fc2fd Mar 20 15:55:56 crc kubenswrapper[4813]: I0320 15:55:56.509006 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ktx2h" event={"ID":"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6","Type":"ContainerStarted","Data":"f562668229b159ec6859262c6aa70dc50c48f3c297d34cca44fb705a8c6fc2fd"} Mar 20 15:55:58 crc kubenswrapper[4813]: I0320 15:55:58.325751 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-ktx2h"] Mar 20 15:55:58 crc kubenswrapper[4813]: I0320 15:55:58.930208 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-46f5x"] Mar 20 15:55:58 crc kubenswrapper[4813]: I0320 15:55:58.931407 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:55:58 crc kubenswrapper[4813]: I0320 15:55:58.942911 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-46f5x"] Mar 20 15:55:59 crc kubenswrapper[4813]: I0320 15:55:59.008474 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frhb7\" (UniqueName: \"kubernetes.io/projected/96e2a06f-6d79-4e77-af9f-a5bc8959477a-kube-api-access-frhb7\") pod \"openstack-operator-index-46f5x\" (UID: \"96e2a06f-6d79-4e77-af9f-a5bc8959477a\") " pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:55:59 crc kubenswrapper[4813]: I0320 15:55:59.109902 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frhb7\" (UniqueName: \"kubernetes.io/projected/96e2a06f-6d79-4e77-af9f-a5bc8959477a-kube-api-access-frhb7\") pod \"openstack-operator-index-46f5x\" (UID: \"96e2a06f-6d79-4e77-af9f-a5bc8959477a\") " pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:55:59 crc kubenswrapper[4813]: I0320 15:55:59.134149 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frhb7\" (UniqueName: \"kubernetes.io/projected/96e2a06f-6d79-4e77-af9f-a5bc8959477a-kube-api-access-frhb7\") pod \"openstack-operator-index-46f5x\" (UID: \"96e2a06f-6d79-4e77-af9f-a5bc8959477a\") " pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:55:59 crc kubenswrapper[4813]: I0320 15:55:59.303548 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:55:59 crc kubenswrapper[4813]: I0320 15:55:59.750577 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-46f5x"] Mar 20 15:55:59 crc kubenswrapper[4813]: W0320 15:55:59.823258 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96e2a06f_6d79_4e77_af9f_a5bc8959477a.slice/crio-798f871c0494ede92906a14bda88d8b81825646f756464d7496202a985528f42 WatchSource:0}: Error finding container 798f871c0494ede92906a14bda88d8b81825646f756464d7496202a985528f42: Status 404 returned error can't find the container with id 798f871c0494ede92906a14bda88d8b81825646f756464d7496202a985528f42 Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.122343 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567036-whmmc"] Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.123174 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.126326 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.126692 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.127628 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.183357 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567036-whmmc"] Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.223427 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9f6x\" (UniqueName: \"kubernetes.io/projected/e821fe55-f415-430a-be0c-d082bd778340-kube-api-access-l9f6x\") pod \"auto-csr-approver-29567036-whmmc\" (UID: \"e821fe55-f415-430a-be0c-d082bd778340\") " pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.324995 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9f6x\" (UniqueName: \"kubernetes.io/projected/e821fe55-f415-430a-be0c-d082bd778340-kube-api-access-l9f6x\") pod \"auto-csr-approver-29567036-whmmc\" (UID: \"e821fe55-f415-430a-be0c-d082bd778340\") " pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.351906 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9f6x\" (UniqueName: \"kubernetes.io/projected/e821fe55-f415-430a-be0c-d082bd778340-kube-api-access-l9f6x\") pod \"auto-csr-approver-29567036-whmmc\" (UID: \"e821fe55-f415-430a-be0c-d082bd778340\") " pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.495831 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:00 crc kubenswrapper[4813]: I0320 15:56:00.535468 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-46f5x" event={"ID":"96e2a06f-6d79-4e77-af9f-a5bc8959477a","Type":"ContainerStarted","Data":"798f871c0494ede92906a14bda88d8b81825646f756464d7496202a985528f42"} Mar 20 15:56:01 crc kubenswrapper[4813]: I0320 15:56:01.155225 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567036-whmmc"] Mar 20 15:56:01 crc kubenswrapper[4813]: I0320 15:56:01.543097 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567036-whmmc" event={"ID":"e821fe55-f415-430a-be0c-d082bd778340","Type":"ContainerStarted","Data":"9e7b877d25817d67fb5cfa69b30f0f4ab75036c2a32102f53624cbfce78ae978"} Mar 20 15:56:02 crc kubenswrapper[4813]: I0320 15:56:02.560660 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-46f5x" event={"ID":"96e2a06f-6d79-4e77-af9f-a5bc8959477a","Type":"ContainerStarted","Data":"be334df79932f949353080bf7e8385dc36c9b350f2e7a205ab40cfd7ca15931b"} Mar 20 15:56:02 crc kubenswrapper[4813]: I0320 15:56:02.562710 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ktx2h" event={"ID":"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6","Type":"ContainerStarted","Data":"6b4a09e4cc381b2a119fec3135b2993692e07ef86e25af6e5375cc76503f2963"} Mar 20 15:56:02 crc kubenswrapper[4813]: I0320 15:56:02.562900 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-ktx2h" podUID="48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" containerName="registry-server" containerID="cri-o://6b4a09e4cc381b2a119fec3135b2993692e07ef86e25af6e5375cc76503f2963" gracePeriod=2 Mar 20 15:56:02 crc kubenswrapper[4813]: I0320 15:56:02.587858 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-46f5x" podStartSLOduration=2.307522471 podStartE2EDuration="4.587831749s" podCreationTimestamp="2026-03-20 15:55:58 +0000 UTC" firstStartedPulling="2026-03-20 15:55:59.829121726 +0000 UTC m=+1089.251824567" lastFinishedPulling="2026-03-20 15:56:02.109431004 +0000 UTC m=+1091.532133845" observedRunningTime="2026-03-20 15:56:02.586541875 +0000 UTC m=+1092.009244756" watchObservedRunningTime="2026-03-20 15:56:02.587831749 +0000 UTC m=+1092.010534600" Mar 20 15:56:02 crc kubenswrapper[4813]: I0320 15:56:02.614888 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ktx2h" podStartSLOduration=1.985084689 podStartE2EDuration="7.614855935s" podCreationTimestamp="2026-03-20 15:55:55 +0000 UTC" firstStartedPulling="2026-03-20 15:55:56.488059713 +0000 UTC m=+1085.910762554" lastFinishedPulling="2026-03-20 15:56:02.117830959 +0000 UTC m=+1091.540533800" observedRunningTime="2026-03-20 15:56:02.603311615 +0000 UTC m=+1092.026014476" watchObservedRunningTime="2026-03-20 15:56:02.614855935 +0000 UTC m=+1092.037558816" Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.569855 4813 generic.go:334] "Generic (PLEG): container finished" podID="48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" containerID="6b4a09e4cc381b2a119fec3135b2993692e07ef86e25af6e5375cc76503f2963" exitCode=0 Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.569960 4813 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ktx2h" event={"ID":"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6","Type":"ContainerDied","Data":"6b4a09e4cc381b2a119fec3135b2993692e07ef86e25af6e5375cc76503f2963"} Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.570177 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ktx2h" event={"ID":"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6","Type":"ContainerDied","Data":"f562668229b159ec6859262c6aa70dc50c48f3c297d34cca44fb705a8c6fc2fd"} Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.570196 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f562668229b159ec6859262c6aa70dc50c48f3c297d34cca44fb705a8c6fc2fd" Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.592249 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.673054 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mplgb\" (UniqueName: \"kubernetes.io/projected/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6-kube-api-access-mplgb\") pod \"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6\" (UID: \"48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6\") " Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.685723 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6-kube-api-access-mplgb" (OuterVolumeSpecName: "kube-api-access-mplgb") pod "48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" (UID: "48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6"). InnerVolumeSpecName "kube-api-access-mplgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.774699 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mplgb\" (UniqueName: \"kubernetes.io/projected/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6-kube-api-access-mplgb\") on node \"crc\" DevicePath \"\"" Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.842645 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:56:03 crc kubenswrapper[4813]: I0320 15:56:03.843011 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:56:04 crc kubenswrapper[4813]: I0320 15:56:04.577903 4813 generic.go:334] "Generic (PLEG): container finished" podID="e821fe55-f415-430a-be0c-d082bd778340" containerID="f996c5b2ba397eaee49a56bce470a1e2ec56d25a1d14a175e258cbf3cc9671d9" exitCode=0 Mar 20 15:56:04 crc kubenswrapper[4813]: I0320 15:56:04.577975 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ktx2h" Mar 20 15:56:04 crc kubenswrapper[4813]: I0320 15:56:04.579708 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567036-whmmc" event={"ID":"e821fe55-f415-430a-be0c-d082bd778340","Type":"ContainerDied","Data":"f996c5b2ba397eaee49a56bce470a1e2ec56d25a1d14a175e258cbf3cc9671d9"} Mar 20 15:56:04 crc kubenswrapper[4813]: I0320 15:56:04.610239 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-ktx2h"] Mar 20 15:56:04 crc kubenswrapper[4813]: I0320 15:56:04.615861 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-ktx2h"] Mar 20 15:56:05 crc kubenswrapper[4813]: I0320 15:56:05.276169 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" path="/var/lib/kubelet/pods/48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6/volumes" Mar 20 15:56:05 crc kubenswrapper[4813]: I0320 15:56:05.862538 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:05 crc kubenswrapper[4813]: I0320 15:56:05.901740 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9f6x\" (UniqueName: \"kubernetes.io/projected/e821fe55-f415-430a-be0c-d082bd778340-kube-api-access-l9f6x\") pod \"e821fe55-f415-430a-be0c-d082bd778340\" (UID: \"e821fe55-f415-430a-be0c-d082bd778340\") " Mar 20 15:56:05 crc kubenswrapper[4813]: I0320 15:56:05.906876 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e821fe55-f415-430a-be0c-d082bd778340-kube-api-access-l9f6x" (OuterVolumeSpecName: "kube-api-access-l9f6x") pod "e821fe55-f415-430a-be0c-d082bd778340" (UID: "e821fe55-f415-430a-be0c-d082bd778340"). InnerVolumeSpecName "kube-api-access-l9f6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:56:06 crc kubenswrapper[4813]: I0320 15:56:06.003608 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9f6x\" (UniqueName: \"kubernetes.io/projected/e821fe55-f415-430a-be0c-d082bd778340-kube-api-access-l9f6x\") on node \"crc\" DevicePath \"\"" Mar 20 15:56:06 crc kubenswrapper[4813]: I0320 15:56:06.592960 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567036-whmmc" event={"ID":"e821fe55-f415-430a-be0c-d082bd778340","Type":"ContainerDied","Data":"9e7b877d25817d67fb5cfa69b30f0f4ab75036c2a32102f53624cbfce78ae978"} Mar 20 15:56:06 crc kubenswrapper[4813]: I0320 15:56:06.592995 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e7b877d25817d67fb5cfa69b30f0f4ab75036c2a32102f53624cbfce78ae978" Mar 20 15:56:06 crc kubenswrapper[4813]: I0320 15:56:06.593125 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567036-whmmc" Mar 20 15:56:06 crc kubenswrapper[4813]: I0320 15:56:06.921521 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567030-mrdjh"] Mar 20 15:56:06 crc kubenswrapper[4813]: I0320 15:56:06.927588 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567030-mrdjh"] Mar 20 15:56:07 crc kubenswrapper[4813]: I0320 15:56:07.279221 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e55fc1c-85ca-43da-9c6a-e774ad82bdb0" path="/var/lib/kubelet/pods/1e55fc1c-85ca-43da-9c6a-e774ad82bdb0/volumes" Mar 20 15:56:09 crc kubenswrapper[4813]: I0320 15:56:09.303857 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:56:09 crc kubenswrapper[4813]: I0320 15:56:09.306854 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:56:09 crc kubenswrapper[4813]: I0320 15:56:09.347692 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:56:09 crc kubenswrapper[4813]: I0320 15:56:09.660797 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-46f5x" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.580624 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb"] Mar 20 15:56:16 crc kubenswrapper[4813]: E0320 15:56:16.581740 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e821fe55-f415-430a-be0c-d082bd778340" containerName="oc" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.581772 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e821fe55-f415-430a-be0c-d082bd778340" containerName="oc" Mar 20 15:56:16 crc kubenswrapper[4813]: E0320 15:56:16.581798 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" containerName="registry-server" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.581815 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" containerName="registry-server" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.582054 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a041b6-2ebc-45e0-a4a1-21aea7f2a7a6" containerName="registry-server" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.582092 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e821fe55-f415-430a-be0c-d082bd778340" containerName="oc" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.583336 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.586013 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-vzwcd" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.598395 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb"] Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.657292 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-przdb\" (UniqueName: \"kubernetes.io/projected/5ad3b123-515a-46c7-be27-6bf55d5823d0-kube-api-access-przdb\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.657398 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-bundle\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.657699 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-util\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.759398 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-util\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.759528 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-przdb\" (UniqueName: \"kubernetes.io/projected/5ad3b123-515a-46c7-be27-6bf55d5823d0-kube-api-access-przdb\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.759588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-bundle\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.760386 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-util\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.760447 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-bundle\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.784335 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-przdb\" (UniqueName: \"kubernetes.io/projected/5ad3b123-515a-46c7-be27-6bf55d5823d0-kube-api-access-przdb\") pod \"6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:16 crc kubenswrapper[4813]: I0320 15:56:16.907734 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:17 crc kubenswrapper[4813]: I0320 15:56:17.377323 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb"] Mar 20 15:56:17 crc kubenswrapper[4813]: W0320 15:56:17.386109 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ad3b123_515a_46c7_be27_6bf55d5823d0.slice/crio-266b5e02e6b27d7aa8c25202de965196de5d6dfdef3d2d727af1a077e54db5d9 WatchSource:0}: Error finding container 266b5e02e6b27d7aa8c25202de965196de5d6dfdef3d2d727af1a077e54db5d9: Status 404 returned error can't find the container with id 266b5e02e6b27d7aa8c25202de965196de5d6dfdef3d2d727af1a077e54db5d9 Mar 20 15:56:17 crc kubenswrapper[4813]: I0320 15:56:17.688364 4813 generic.go:334] "Generic (PLEG): container finished" podID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerID="947b70aaae07ff2b907ef58d0f4fbbbf9ca2f2b885adc86e6b5c584a4bd30bc7" exitCode=0 Mar 20 15:56:17 crc kubenswrapper[4813]: I0320 15:56:17.688404 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" event={"ID":"5ad3b123-515a-46c7-be27-6bf55d5823d0","Type":"ContainerDied","Data":"947b70aaae07ff2b907ef58d0f4fbbbf9ca2f2b885adc86e6b5c584a4bd30bc7"} Mar 20 15:56:17 crc kubenswrapper[4813]: I0320 15:56:17.688426 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" event={"ID":"5ad3b123-515a-46c7-be27-6bf55d5823d0","Type":"ContainerStarted","Data":"266b5e02e6b27d7aa8c25202de965196de5d6dfdef3d2d727af1a077e54db5d9"} Mar 20 15:56:18 crc kubenswrapper[4813]: I0320 15:56:18.696828 4813 generic.go:334] "Generic (PLEG): container finished" podID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerID="49a13cf3537d29918bdb188912a9d4a4538715acb45d628c5c22628a4465b962" exitCode=0 Mar 20 15:56:18 crc kubenswrapper[4813]: I0320 15:56:18.696885 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" event={"ID":"5ad3b123-515a-46c7-be27-6bf55d5823d0","Type":"ContainerDied","Data":"49a13cf3537d29918bdb188912a9d4a4538715acb45d628c5c22628a4465b962"} Mar 20 15:56:19 crc kubenswrapper[4813]: I0320 15:56:19.707902 4813 generic.go:334] "Generic (PLEG): container finished" podID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerID="d6dd1a1a8f846ee8173c0bbea4b4fbebe68fe3be1cdca37a96efbc036f61ba9a" exitCode=0 Mar 20 15:56:19 crc kubenswrapper[4813]: I0320 15:56:19.707957 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" event={"ID":"5ad3b123-515a-46c7-be27-6bf55d5823d0","Type":"ContainerDied","Data":"d6dd1a1a8f846ee8173c0bbea4b4fbebe68fe3be1cdca37a96efbc036f61ba9a"} Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.015930 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.047373 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-bundle\") pod \"5ad3b123-515a-46c7-be27-6bf55d5823d0\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.047471 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-util\") pod \"5ad3b123-515a-46c7-be27-6bf55d5823d0\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.047556 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-przdb\" (UniqueName: \"kubernetes.io/projected/5ad3b123-515a-46c7-be27-6bf55d5823d0-kube-api-access-przdb\") pod \"5ad3b123-515a-46c7-be27-6bf55d5823d0\" (UID: \"5ad3b123-515a-46c7-be27-6bf55d5823d0\") " Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.048350 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-bundle" (OuterVolumeSpecName: "bundle") pod "5ad3b123-515a-46c7-be27-6bf55d5823d0" (UID: "5ad3b123-515a-46c7-be27-6bf55d5823d0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.055747 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ad3b123-515a-46c7-be27-6bf55d5823d0-kube-api-access-przdb" (OuterVolumeSpecName: "kube-api-access-przdb") pod "5ad3b123-515a-46c7-be27-6bf55d5823d0" (UID: "5ad3b123-515a-46c7-be27-6bf55d5823d0"). InnerVolumeSpecName "kube-api-access-przdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.066760 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-util" (OuterVolumeSpecName: "util") pod "5ad3b123-515a-46c7-be27-6bf55d5823d0" (UID: "5ad3b123-515a-46c7-be27-6bf55d5823d0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.151822 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.151903 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ad3b123-515a-46c7-be27-6bf55d5823d0-util\") on node \"crc\" DevicePath \"\"" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.151921 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-przdb\" (UniqueName: \"kubernetes.io/projected/5ad3b123-515a-46c7-be27-6bf55d5823d0-kube-api-access-przdb\") on node \"crc\" DevicePath \"\"" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.729438 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" event={"ID":"5ad3b123-515a-46c7-be27-6bf55d5823d0","Type":"ContainerDied","Data":"266b5e02e6b27d7aa8c25202de965196de5d6dfdef3d2d727af1a077e54db5d9"} Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.729504 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb" Mar 20 15:56:21 crc kubenswrapper[4813]: I0320 15:56:21.729517 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="266b5e02e6b27d7aa8c25202de965196de5d6dfdef3d2d727af1a077e54db5d9" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.769122 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-646f48576b-bmc92"] Mar 20 15:56:28 crc kubenswrapper[4813]: E0320 15:56:28.770134 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="util" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.770157 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="util" Mar 20 15:56:28 crc kubenswrapper[4813]: E0320 15:56:28.770192 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="pull" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.770202 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="pull" Mar 20 15:56:28 crc kubenswrapper[4813]: E0320 15:56:28.770223 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="extract" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.770235 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="extract" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.770435 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ad3b123-515a-46c7-be27-6bf55d5823d0" containerName="extract" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.771160 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.774437 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-7mc7m" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.803433 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-646f48576b-bmc92"] Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.853291 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whldn\" (UniqueName: \"kubernetes.io/projected/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8-kube-api-access-whldn\") pod \"openstack-operator-controller-init-646f48576b-bmc92\" (UID: \"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8\") " pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.954104 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whldn\" (UniqueName: \"kubernetes.io/projected/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8-kube-api-access-whldn\") pod \"openstack-operator-controller-init-646f48576b-bmc92\" (UID: \"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8\") " pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:28 crc kubenswrapper[4813]: I0320 15:56:28.973555 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whldn\" (UniqueName: \"kubernetes.io/projected/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8-kube-api-access-whldn\") pod \"openstack-operator-controller-init-646f48576b-bmc92\" (UID: \"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8\") " pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:29 crc kubenswrapper[4813]: I0320 15:56:29.092355 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:29 crc kubenswrapper[4813]: I0320 15:56:29.565231 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-646f48576b-bmc92"] Mar 20 15:56:29 crc kubenswrapper[4813]: W0320 15:56:29.576861 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeee8c4ad_1041_4c34_8c8f_b2feb75fbee8.slice/crio-363b202f4726359de8aa427a8a5160becb20bea1906e4f85296980a0ef070225 WatchSource:0}: Error finding container 363b202f4726359de8aa427a8a5160becb20bea1906e4f85296980a0ef070225: Status 404 returned error can't find the container with id 363b202f4726359de8aa427a8a5160becb20bea1906e4f85296980a0ef070225 Mar 20 15:56:29 crc kubenswrapper[4813]: I0320 15:56:29.790783 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" event={"ID":"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8","Type":"ContainerStarted","Data":"363b202f4726359de8aa427a8a5160becb20bea1906e4f85296980a0ef070225"} Mar 20 15:56:33 crc kubenswrapper[4813]: I0320 15:56:33.826073 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" event={"ID":"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8","Type":"ContainerStarted","Data":"a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c"} Mar 20 15:56:33 crc kubenswrapper[4813]: I0320 15:56:33.826815 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:33 crc kubenswrapper[4813]: I0320 15:56:33.843032 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:56:33 crc kubenswrapper[4813]: I0320 15:56:33.843102 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:56:33 crc kubenswrapper[4813]: I0320 15:56:33.876907 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" podStartSLOduration=2.391596592 podStartE2EDuration="5.876873986s" podCreationTimestamp="2026-03-20 15:56:28 +0000 UTC" firstStartedPulling="2026-03-20 15:56:29.579796146 +0000 UTC m=+1119.002498987" lastFinishedPulling="2026-03-20 15:56:33.06507354 +0000 UTC m=+1122.487776381" observedRunningTime="2026-03-20 15:56:33.870055081 +0000 UTC m=+1123.292757992" watchObservedRunningTime="2026-03-20 15:56:33.876873986 +0000 UTC m=+1123.299576867" Mar 20 15:56:39 crc kubenswrapper[4813]: I0320 15:56:39.094713 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.926429 4813 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5"] Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.927970 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.942164 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-mjh5n" Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.959440 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t428q\" (UniqueName: \"kubernetes.io/projected/7c7640f5-78e7-4dba-8900-26dc47eb640f-kube-api-access-t428q\") pod \"barbican-operator-controller-manager-59bc569d95-2l2s5\" (UID: \"7c7640f5-78e7-4dba-8900-26dc47eb640f\") " pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.964656 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq"] Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.967066 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.975611 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-vmct5" Mar 20 15:56:58 crc kubenswrapper[4813]: I0320 15:56:58.994999 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.011518 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.012578 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.015424 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-p68zq" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.024589 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.050257 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.060574 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t428q\" (UniqueName: \"kubernetes.io/projected/7c7640f5-78e7-4dba-8900-26dc47eb640f-kube-api-access-t428q\") pod \"barbican-operator-controller-manager-59bc569d95-2l2s5\" (UID: \"7c7640f5-78e7-4dba-8900-26dc47eb640f\") " pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.064551 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.065723 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.070932 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-rmkqv" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.072115 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.073223 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.078592 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-fvgzl" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.085808 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.093873 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t428q\" (UniqueName: \"kubernetes.io/projected/7c7640f5-78e7-4dba-8900-26dc47eb640f-kube-api-access-t428q\") pod \"barbican-operator-controller-manager-59bc569d95-2l2s5\" (UID: \"7c7640f5-78e7-4dba-8900-26dc47eb640f\") " pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.110345 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.113062 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.114554 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.117839 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-bdv6g" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.129080 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.162560 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.163808 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.165136 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx2xr\" (UniqueName: \"kubernetes.io/projected/6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe-kube-api-access-vx2xr\") pod \"designate-operator-controller-manager-588d4d986b-lzq5s\" (UID: \"6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe\") " pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.165204 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qdz7\" (UniqueName: \"kubernetes.io/projected/f0d2c31f-5223-4222-afa8-ba918af23dca-kube-api-access-9qdz7\") pod \"cinder-operator-controller-manager-8d58dc466-q88gq\" (UID: \"f0d2c31f-5223-4222-afa8-ba918af23dca\") " pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.169838 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.170004 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.170067 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-wknvl" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.170868 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.175001 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-62qhw" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.186544 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.201712 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.202786 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.207524 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-rvhc6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.214855 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.218186 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.225563 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.226401 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.229798 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-l7p94" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.244877 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.263550 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.264417 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269315 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvq4r\" (UniqueName: \"kubernetes.io/projected/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-kube-api-access-bvq4r\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgz55\" (UniqueName: \"kubernetes.io/projected/be5e97ef-9ad5-4663-bbf1-69573e2eedeb-kube-api-access-xgz55\") pod \"horizon-operator-controller-manager-8464cc45fb-v8t7w\" (UID: \"be5e97ef-9ad5-4663-bbf1-69573e2eedeb\") " pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269386 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6xvl\" (UniqueName: \"kubernetes.io/projected/829ecd99-4a08-4965-ab30-fb30ab8e2ead-kube-api-access-g6xvl\") pod \"heat-operator-controller-manager-67dd5f86f5-mqm85\" (UID: \"829ecd99-4a08-4965-ab30-fb30ab8e2ead\") " pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269427 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx2xr\" (UniqueName: \"kubernetes.io/projected/6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe-kube-api-access-vx2xr\") pod \"designate-operator-controller-manager-588d4d986b-lzq5s\" (UID: \"6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe\") " pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269495 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qdz7\" (UniqueName: \"kubernetes.io/projected/f0d2c31f-5223-4222-afa8-ba918af23dca-kube-api-access-9qdz7\") pod \"cinder-operator-controller-manager-8d58dc466-q88gq\" (UID: \"f0d2c31f-5223-4222-afa8-ba918af23dca\") " pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269521 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c77l2\" (UniqueName: \"kubernetes.io/projected/200b39b4-9995-48fc-a31f-1708526bd9d8-kube-api-access-c77l2\") pod \"glance-operator-controller-manager-79df6bcc97-z8zt6\" (UID: \"200b39b4-9995-48fc-a31f-1708526bd9d8\") " pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.269538 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.270728 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.272919 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4b79s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.277791 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.294443 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-767865f676-fstk4"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.303155 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.318800 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-bf6k2" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.325789 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx2xr\" (UniqueName: \"kubernetes.io/projected/6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe-kube-api-access-vx2xr\") pod \"designate-operator-controller-manager-588d4d986b-lzq5s\" (UID: \"6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe\") " pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.326196 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qdz7\" (UniqueName: \"kubernetes.io/projected/f0d2c31f-5223-4222-afa8-ba918af23dca-kube-api-access-9qdz7\") pod \"cinder-operator-controller-manager-8d58dc466-q88gq\" (UID: \"f0d2c31f-5223-4222-afa8-ba918af23dca\") " pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.328412 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.331619 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-767865f676-fstk4"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.370840 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.371605 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375632 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhd2g\" (UniqueName: \"kubernetes.io/projected/a314b9f5-f2d9-445c-bf5d-a42dc479c21d-kube-api-access-xhd2g\") pod \"mariadb-operator-controller-manager-67ccfc9778-cff9z\" (UID: \"a314b9f5-f2d9-445c-bf5d-a42dc479c21d\") " pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvq4r\" (UniqueName: \"kubernetes.io/projected/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-kube-api-access-bvq4r\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375693 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgz55\" (UniqueName: \"kubernetes.io/projected/be5e97ef-9ad5-4663-bbf1-69573e2eedeb-kube-api-access-xgz55\") pod \"horizon-operator-controller-manager-8464cc45fb-v8t7w\" (UID: \"be5e97ef-9ad5-4663-bbf1-69573e2eedeb\") " pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375713 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvqzf\" (UniqueName: \"kubernetes.io/projected/7f918684-13af-4141-a414-9ac7b87e75d9-kube-api-access-kvqzf\") pod \"ironic-operator-controller-manager-6f787dddc9-r7v2l\" (UID: \"7f918684-13af-4141-a414-9ac7b87e75d9\") " pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375729 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6xvl\" (UniqueName: \"kubernetes.io/projected/829ecd99-4a08-4965-ab30-fb30ab8e2ead-kube-api-access-g6xvl\") pod \"heat-operator-controller-manager-67dd5f86f5-mqm85\" (UID: \"829ecd99-4a08-4965-ab30-fb30ab8e2ead\") " pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375758 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqjbg\" (UniqueName: \"kubernetes.io/projected/6c1e7d53-7b72-4c38-bca1-94db6fd742d2-kube-api-access-mqjbg\") pod \"nova-operator-controller-manager-5d488d59fb-vj727\" (UID: \"6c1e7d53-7b72-4c38-bca1-94db6fd742d2\") " pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375782 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc7sk\" (UniqueName: \"kubernetes.io/projected/aee070f5-c22b-4b69-b116-685316825aaa-kube-api-access-fc7sk\") pod \"neutron-operator-controller-manager-767865f676-fstk4\" (UID: \"aee070f5-c22b-4b69-b116-685316825aaa\") " pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-c77l2\" (UniqueName: \"kubernetes.io/projected/200b39b4-9995-48fc-a31f-1708526bd9d8-kube-api-access-c77l2\") pod \"glance-operator-controller-manager-79df6bcc97-z8zt6\" (UID: \"200b39b4-9995-48fc-a31f-1708526bd9d8\") " pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375878 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm6wj\" (UniqueName: \"kubernetes.io/projected/35420d07-3f39-47f7-bc13-d5fc95954674-kube-api-access-hm6wj\") pod \"keystone-operator-controller-manager-768b96df4c-kvw9r\" (UID: \"35420d07-3f39-47f7-bc13-d5fc95954674\") " pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375898 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.375929 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvgxz\" (UniqueName: \"kubernetes.io/projected/94489fb6-8195-4820-b4bf-87122803836a-kube-api-access-cvgxz\") pod \"manila-operator-controller-manager-55f864c847-6xnn6\" (UID: \"94489fb6-8195-4820-b4bf-87122803836a\") " pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:56:59 crc kubenswrapper[4813]: E0320 15:56:59.380275 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 15:56:59 crc kubenswrapper[4813]: E0320 15:56:59.380336 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert podName:bcb8373c-ae7c-4646-a90a-fe965f70c9bd nodeName:}" failed. No retries permitted until 2026-03-20 15:56:59.880319039 +0000 UTC m=+1149.303021880 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert") pod "infra-operator-controller-manager-7b9c774f96-qzktk" (UID: "bcb8373c-ae7c-4646-a90a-fe965f70c9bd") : secret "infra-operator-webhook-server-cert" not found Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.382069 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-hwr85" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.397493 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.398300 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.422518 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-5kcvt" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.444917 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.449729 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6xvl\" (UniqueName: \"kubernetes.io/projected/829ecd99-4a08-4965-ab30-fb30ab8e2ead-kube-api-access-g6xvl\") pod \"heat-operator-controller-manager-67dd5f86f5-mqm85\" (UID: \"829ecd99-4a08-4965-ab30-fb30ab8e2ead\") " pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.464775 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.476078 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgz55\" (UniqueName: \"kubernetes.io/projected/be5e97ef-9ad5-4663-bbf1-69573e2eedeb-kube-api-access-xgz55\") pod \"horizon-operator-controller-manager-8464cc45fb-v8t7w\" (UID: \"be5e97ef-9ad5-4663-bbf1-69573e2eedeb\") " pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.477349 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm6wj\" (UniqueName: \"kubernetes.io/projected/35420d07-3f39-47f7-bc13-d5fc95954674-kube-api-access-hm6wj\") pod \"keystone-operator-controller-manager-768b96df4c-kvw9r\" (UID: \"35420d07-3f39-47f7-bc13-d5fc95954674\") " pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.477388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvgxz\" (UniqueName: \"kubernetes.io/projected/94489fb6-8195-4820-b4bf-87122803836a-kube-api-access-cvgxz\") pod \"manila-operator-controller-manager-55f864c847-6xnn6\" (UID: \"94489fb6-8195-4820-b4bf-87122803836a\") " pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.477423 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhd2g\" (UniqueName: \"kubernetes.io/projected/a314b9f5-f2d9-445c-bf5d-a42dc479c21d-kube-api-access-xhd2g\") pod \"mariadb-operator-controller-manager-67ccfc9778-cff9z\" (UID: \"a314b9f5-f2d9-445c-bf5d-a42dc479c21d\") " pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.477452 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvqzf\" (UniqueName: \"kubernetes.io/projected/7f918684-13af-4141-a414-9ac7b87e75d9-kube-api-access-kvqzf\") pod \"ironic-operator-controller-manager-6f787dddc9-r7v2l\" (UID: \"7f918684-13af-4141-a414-9ac7b87e75d9\") " pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.477473 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mqjbg\" (UniqueName: \"kubernetes.io/projected/6c1e7d53-7b72-4c38-bca1-94db6fd742d2-kube-api-access-mqjbg\") pod \"nova-operator-controller-manager-5d488d59fb-vj727\" (UID: \"6c1e7d53-7b72-4c38-bca1-94db6fd742d2\") " pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.477513 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc7sk\" (UniqueName: \"kubernetes.io/projected/aee070f5-c22b-4b69-b116-685316825aaa-kube-api-access-fc7sk\") pod \"neutron-operator-controller-manager-767865f676-fstk4\" (UID: \"aee070f5-c22b-4b69-b116-685316825aaa\") " pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.479264 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c77l2\" (UniqueName: \"kubernetes.io/projected/200b39b4-9995-48fc-a31f-1708526bd9d8-kube-api-access-c77l2\") pod \"glance-operator-controller-manager-79df6bcc97-z8zt6\" (UID: \"200b39b4-9995-48fc-a31f-1708526bd9d8\") " pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.496231 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvq4r\" (UniqueName: \"kubernetes.io/projected/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-kube-api-access-bvq4r\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.527318 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvqzf\" (UniqueName: \"kubernetes.io/projected/7f918684-13af-4141-a414-9ac7b87e75d9-kube-api-access-kvqzf\") pod \"ironic-operator-controller-manager-6f787dddc9-r7v2l\" (UID: \"7f918684-13af-4141-a414-9ac7b87e75d9\") " pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.539413 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhd2g\" (UniqueName: \"kubernetes.io/projected/a314b9f5-f2d9-445c-bf5d-a42dc479c21d-kube-api-access-xhd2g\") pod \"mariadb-operator-controller-manager-67ccfc9778-cff9z\" (UID: \"a314b9f5-f2d9-445c-bf5d-a42dc479c21d\") " pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.553795 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc7sk\" (UniqueName: \"kubernetes.io/projected/aee070f5-c22b-4b69-b116-685316825aaa-kube-api-access-fc7sk\") pod \"neutron-operator-controller-manager-767865f676-fstk4\" (UID: \"aee070f5-c22b-4b69-b116-685316825aaa\") " pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.570135 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvgxz\" (UniqueName: \"kubernetes.io/projected/94489fb6-8195-4820-b4bf-87122803836a-kube-api-access-cvgxz\") pod \"manila-operator-controller-manager-55f864c847-6xnn6\" (UID: \"94489fb6-8195-4820-b4bf-87122803836a\") " pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:56:59 crc 
kubenswrapper[4813]: I0320 15:56:59.591680 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.593576 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm6wj\" (UniqueName: \"kubernetes.io/projected/35420d07-3f39-47f7-bc13-d5fc95954674-kube-api-access-hm6wj\") pod \"keystone-operator-controller-manager-768b96df4c-kvw9r\" (UID: \"35420d07-3f39-47f7-bc13-d5fc95954674\") " pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.594842 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2cnr\" (UniqueName: \"kubernetes.io/projected/e36b96c5-7bfb-4657-b69b-7eaeeff3b477-kube-api-access-j2cnr\") pod \"octavia-operator-controller-manager-5b9f45d989-rncs2\" (UID: \"e36b96c5-7bfb-4657-b69b-7eaeeff3b477\") " pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.595305 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.596588 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.621033 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.632006 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-2rswk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.643156 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqjbg\" (UniqueName: \"kubernetes.io/projected/6c1e7d53-7b72-4c38-bca1-94db6fd742d2-kube-api-access-mqjbg\") pod \"nova-operator-controller-manager-5d488d59fb-vj727\" (UID: \"6c1e7d53-7b72-4c38-bca1-94db6fd742d2\") " pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.668037 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.668964 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.680258 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.685016 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-9rp76" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.685689 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.688773 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.696135 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2cnr\" (UniqueName: \"kubernetes.io/projected/e36b96c5-7bfb-4657-b69b-7eaeeff3b477-kube-api-access-j2cnr\") pod \"octavia-operator-controller-manager-5b9f45d989-rncs2\" (UID: \"e36b96c5-7bfb-4657-b69b-7eaeeff3b477\") " pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.722892 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.723015 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2cnr\" (UniqueName: \"kubernetes.io/projected/e36b96c5-7bfb-4657-b69b-7eaeeff3b477-kube-api-access-j2cnr\") pod \"octavia-operator-controller-manager-5b9f45d989-rncs2\" (UID: \"e36b96c5-7bfb-4657-b69b-7eaeeff3b477\") " pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.735693 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.736866 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.739167 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.739319 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-jqrmp" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.739577 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.743589 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-c674c5965-nqn84"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.744950 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.746992 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-6z8h7" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.758414 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-c674c5965-nqn84"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.768240 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.791074 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.791925 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.794070 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-nch48" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.799366 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whqhl\" (UniqueName: \"kubernetes.io/projected/3e11ecc5-9159-4144-a583-1b657f2349ea-kube-api-access-whqhl\") pod \"ovn-operator-controller-manager-884679f54-xvk2s\" (UID: \"3e11ecc5-9159-4144-a583-1b657f2349ea\") " pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.799406 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.799446 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvtpx\" (UniqueName: \"kubernetes.io/projected/1847c24e-09bc-44da-9343-5ae3f93c1dd1-kube-api-access-nvtpx\") pod \"placement-operator-controller-manager-5784578c99-gpxm6\" (UID: \"1847c24e-09bc-44da-9343-5ae3f93c1dd1\") " pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.799465 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnc2j\" (UniqueName: \"kubernetes.io/projected/a1747eb3-27f1-4f93-85c4-d786b8730bc9-kube-api-access-xnc2j\") pod \"swift-operator-controller-manager-c674c5965-nqn84\" (UID: \"a1747eb3-27f1-4f93-85c4-d786b8730bc9\") " pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.799499 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfpfd\" (UniqueName: \"kubernetes.io/projected/31acb64e-576e-4b51-a8a3-37162d9161c0-kube-api-access-vfpfd\") pod 
\"telemetry-operator-controller-manager-d6b694c5-2xj66\" (UID: \"31acb64e-576e-4b51-a8a3-37162d9161c0\") " pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.799555 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkdtq\" (UniqueName: \"kubernetes.io/projected/7a422362-8c7e-4943-ad13-ca4089978ef9-kube-api-access-hkdtq\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.807903 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.823900 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.829915 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.839693 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.845785 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.856051 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.858476 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.859622 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.862881 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-ngggp" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.874777 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.884058 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.885198 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.887873 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-vsh7l" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.892239 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.900930 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqn8c\" (UniqueName: \"kubernetes.io/projected/758fbe07-6d40-4287-8b3b-e1c88bcb9665-kube-api-access-qqn8c\") pod \"watcher-operator-controller-manager-6c5858c67b-t8b5h\" (UID: \"758fbe07-6d40-4287-8b3b-e1c88bcb9665\") " pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.900973 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj24k\" (UniqueName: \"kubernetes.io/projected/2ce265a6-3022-420a-99dc-0cead55c568f-kube-api-access-wj24k\") pod \"test-operator-controller-manager-5c5cb9c4d7-5rxh5\" (UID: \"2ce265a6-3022-420a-99dc-0cead55c568f\") " pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901004 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whqhl\" (UniqueName: \"kubernetes.io/projected/3e11ecc5-9159-4144-a583-1b657f2349ea-kube-api-access-whqhl\") pod \"ovn-operator-controller-manager-884679f54-xvk2s\" (UID: \"3e11ecc5-9159-4144-a583-1b657f2349ea\") " pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901027 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901078 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvtpx\" (UniqueName: \"kubernetes.io/projected/1847c24e-09bc-44da-9343-5ae3f93c1dd1-kube-api-access-nvtpx\") pod \"placement-operator-controller-manager-5784578c99-gpxm6\" (UID: \"1847c24e-09bc-44da-9343-5ae3f93c1dd1\") " pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:56:59 crc kubenswrapper[4813]: E0320 15:56:59.901182 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:56:59 crc kubenswrapper[4813]: E0320 15:56:59.901242 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert podName:7a422362-8c7e-4943-ad13-ca4089978ef9 nodeName:}" failed. No retries permitted until 2026-03-20 15:57:00.401222369 +0000 UTC m=+1149.823925320 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert") pod "openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" (UID: "7a422362-8c7e-4943-ad13-ca4089978ef9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901414 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnc2j\" (UniqueName: \"kubernetes.io/projected/a1747eb3-27f1-4f93-85c4-d786b8730bc9-kube-api-access-xnc2j\") pod \"swift-operator-controller-manager-c674c5965-nqn84\" (UID: \"a1747eb3-27f1-4f93-85c4-d786b8730bc9\") " pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901444 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfpfd\" (UniqueName: \"kubernetes.io/projected/31acb64e-576e-4b51-a8a3-37162d9161c0-kube-api-access-vfpfd\") pod \"telemetry-operator-controller-manager-d6b694c5-2xj66\" (UID: \"31acb64e-576e-4b51-a8a3-37162d9161c0\") " pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901496 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901546 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkdtq\" (UniqueName: \"kubernetes.io/projected/7a422362-8c7e-4943-ad13-ca4089978ef9-kube-api-access-hkdtq\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.901820 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:56:59 crc kubenswrapper[4813]: E0320 15:56:59.901995 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 15:56:59 crc kubenswrapper[4813]: E0320 15:56:59.902039 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert podName:bcb8373c-ae7c-4646-a90a-fe965f70c9bd nodeName:}" failed. No retries permitted until 2026-03-20 15:57:00.902024921 +0000 UTC m=+1150.324727762 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert") pod "infra-operator-controller-manager-7b9c774f96-qzktk" (UID: "bcb8373c-ae7c-4646-a90a-fe965f70c9bd") : secret "infra-operator-webhook-server-cert" not found Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.918274 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.919604 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.922577 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkdtq\" (UniqueName: \"kubernetes.io/projected/7a422362-8c7e-4943-ad13-ca4089978ef9-kube-api-access-hkdtq\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.922723 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.922796 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.922826 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-dr26q" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.936713 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.936724 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whqhl\" (UniqueName: \"kubernetes.io/projected/3e11ecc5-9159-4144-a583-1b657f2349ea-kube-api-access-whqhl\") pod \"ovn-operator-controller-manager-884679f54-xvk2s\" (UID: \"3e11ecc5-9159-4144-a583-1b657f2349ea\") " pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.940257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvtpx\" (UniqueName: \"kubernetes.io/projected/1847c24e-09bc-44da-9343-5ae3f93c1dd1-kube-api-access-nvtpx\") pod \"placement-operator-controller-manager-5784578c99-gpxm6\" (UID: \"1847c24e-09bc-44da-9343-5ae3f93c1dd1\") " pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.940897 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfpfd\" (UniqueName: \"kubernetes.io/projected/31acb64e-576e-4b51-a8a3-37162d9161c0-kube-api-access-vfpfd\") pod \"telemetry-operator-controller-manager-d6b694c5-2xj66\" (UID: \"31acb64e-576e-4b51-a8a3-37162d9161c0\") " pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.947160 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnc2j\" (UniqueName: \"kubernetes.io/projected/a1747eb3-27f1-4f93-85c4-d786b8730bc9-kube-api-access-xnc2j\") pod \"swift-operator-controller-manager-c674c5965-nqn84\" (UID: \"a1747eb3-27f1-4f93-85c4-d786b8730bc9\") " pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.965788 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.980781 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp"] Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.981631 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.984218 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-vchdg" Mar 20 15:56:59 crc kubenswrapper[4813]: I0320 15:56:59.988303 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.002589 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzrp9\" (UniqueName: \"kubernetes.io/projected/94bef8a0-0916-46e0-9253-8130ef2367da-kube-api-access-wzrp9\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.002635 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.002659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pckw8\" (UniqueName: \"kubernetes.io/projected/d09f9f03-5f5c-4b17-b4b7-81ddc051aef3-kube-api-access-pckw8\") pod \"rabbitmq-cluster-operator-manager-668c99d594-jmwcp\" (UID: \"d09f9f03-5f5c-4b17-b4b7-81ddc051aef3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.002691 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqn8c\" (UniqueName: \"kubernetes.io/projected/758fbe07-6d40-4287-8b3b-e1c88bcb9665-kube-api-access-qqn8c\") pod \"watcher-operator-controller-manager-6c5858c67b-t8b5h\" (UID: \"758fbe07-6d40-4287-8b3b-e1c88bcb9665\") " pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.002708 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj24k\" (UniqueName: \"kubernetes.io/projected/2ce265a6-3022-420a-99dc-0cead55c568f-kube-api-access-wj24k\") pod \"test-operator-controller-manager-5c5cb9c4d7-5rxh5\" (UID: \"2ce265a6-3022-420a-99dc-0cead55c568f\") " pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.002754 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod 
\"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.035449 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.035587 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj24k\" (UniqueName: \"kubernetes.io/projected/2ce265a6-3022-420a-99dc-0cead55c568f-kube-api-access-wj24k\") pod \"test-operator-controller-manager-5c5cb9c4d7-5rxh5\" (UID: \"2ce265a6-3022-420a-99dc-0cead55c568f\") " pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.035616 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqn8c\" (UniqueName: \"kubernetes.io/projected/758fbe07-6d40-4287-8b3b-e1c88bcb9665-kube-api-access-qqn8c\") pod \"watcher-operator-controller-manager-6c5858c67b-t8b5h\" (UID: \"758fbe07-6d40-4287-8b3b-e1c88bcb9665\") " pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.103730 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzrp9\" (UniqueName: \"kubernetes.io/projected/94bef8a0-0916-46e0-9253-8130ef2367da-kube-api-access-wzrp9\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.103805 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.103850 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pckw8\" (UniqueName: \"kubernetes.io/projected/d09f9f03-5f5c-4b17-b4b7-81ddc051aef3-kube-api-access-pckw8\") pod \"rabbitmq-cluster-operator-manager-668c99d594-jmwcp\" (UID: \"d09f9f03-5f5c-4b17-b4b7-81ddc051aef3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.103950 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.104370 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.104460 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da 
nodeName:}" failed. No retries permitted until 2026-03-20 15:57:00.604424357 +0000 UTC m=+1150.027127288 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.105047 4813 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.105091 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:00.605079485 +0000 UTC m=+1150.027782326 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "metrics-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.124845 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.125428 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pckw8\" (UniqueName: \"kubernetes.io/projected/d09f9f03-5f5c-4b17-b4b7-81ddc051aef3-kube-api-access-pckw8\") pod \"rabbitmq-cluster-operator-manager-668c99d594-jmwcp\" (UID: \"d09f9f03-5f5c-4b17-b4b7-81ddc051aef3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.126122 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzrp9\" (UniqueName: \"kubernetes.io/projected/94bef8a0-0916-46e0-9253-8130ef2367da-kube-api-access-wzrp9\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.144154 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.201848 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.246689 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.314544 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.318726 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.326637 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.345610 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.393735 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z"] Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.420114 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.420250 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert podName:7a422362-8c7e-4943-ad13-ca4089978ef9 nodeName:}" failed. No retries permitted until 2026-03-20 15:57:01.420232618 +0000 UTC m=+1150.842935459 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert") pod "openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" (UID: "7a422362-8c7e-4943-ad13-ca4089978ef9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.433635 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.540413 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.548868 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.563841 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.602290 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.640660 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.640766 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod 
\"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.640937 4813 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.640990 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:01.640975732 +0000 UTC m=+1151.063678573 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "metrics-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.641071 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.641148 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:01.641128456 +0000 UTC m=+1151.063831297 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.822678 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-767865f676-fstk4"] Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.834499 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6"] Mar 20 15:57:00 crc kubenswrapper[4813]: W0320 15:57:00.837452 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94489fb6_8195_4820_b4bf_87122803836a.slice/crio-49e03064e832c4466b8b8bdd203b9011ea3d6bdfdae2c13dd608b4edb8025862 WatchSource:0}: Error finding container 49e03064e832c4466b8b8bdd203b9011ea3d6bdfdae2c13dd608b4edb8025862: Status 404 returned error can't find the container with id 49e03064e832c4466b8b8bdd203b9011ea3d6bdfdae2c13dd608b4edb8025862 Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.888508 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727"] Mar 20 15:57:00 crc kubenswrapper[4813]: W0320 15:57:00.889972 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c1e7d53_7b72_4c38_bca1_94db6fd742d2.slice/crio-60595506b057b438bd41b7bb4a3f2be1d297e8aa52fc09f21b7559e9e3afc530 WatchSource:0}: Error finding container 60595506b057b438bd41b7bb4a3f2be1d297e8aa52fc09f21b7559e9e3afc530: Status 404 returned error can't find the container with id 
60595506b057b438bd41b7bb4a3f2be1d297e8aa52fc09f21b7559e9e3afc530 Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.946058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.946214 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: E0320 15:57:00.946262 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert podName:bcb8373c-ae7c-4646-a90a-fe965f70c9bd nodeName:}" failed. No retries permitted until 2026-03-20 15:57:02.946247606 +0000 UTC m=+1152.368950447 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert") pod "infra-operator-controller-manager-7b9c774f96-qzktk" (UID: "bcb8373c-ae7c-4646-a90a-fe965f70c9bd") : secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:00 crc kubenswrapper[4813]: I0320 15:57:00.997106 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-c674c5965-nqn84"] Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.016303 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1747eb3_27f1_4f93_85c4_d786b8730bc9.slice/crio-b42edfa2c411f6c941920e0d135aa48bd568e96e7eb51f734df69e8a8dce618f WatchSource:0}: Error finding container b42edfa2c411f6c941920e0d135aa48bd568e96e7eb51f734df69e8a8dce618f: Status 404 returned error can't find the container with id b42edfa2c411f6c941920e0d135aa48bd568e96e7eb51f734df69e8a8dce618f Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.037361 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2"] Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.051285 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6"] Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.058140 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" event={"ID":"6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe","Type":"ContainerStarted","Data":"fa3bdc16c525a189d988348e7922b646c729b67a5eff4aac093ef44a1311ca38"} Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.060108 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1847c24e_09bc_44da_9343_5ae3f93c1dd1.slice/crio-c6fbcca1f98c5379d71416db1685da932b9bfbf6d96349424ca3777f35a82ee9 WatchSource:0}: Error finding container c6fbcca1f98c5379d71416db1685da932b9bfbf6d96349424ca3777f35a82ee9: Status 404 returned error can't find the container with id c6fbcca1f98c5379d71416db1685da932b9bfbf6d96349424ca3777f35a82ee9 Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.060141 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp"] Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.061893 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" event={"ID":"f0d2c31f-5223-4222-afa8-ba918af23dca","Type":"ContainerStarted","Data":"4951d0f0f311f5f503c3b1073f61151e1305aae9bb86c92263fbcd02308d9b40"} Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.063586 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" event={"ID":"7c7640f5-78e7-4dba-8900-26dc47eb640f","Type":"ContainerStarted","Data":"c0b66f3004bf5942589bca6bd46eeec7808216bd398d812cbaa810de98c5da39"} Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.063674 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e11ecc5_9159_4144_a583_1b657f2349ea.slice/crio-605ee638857fd7c4285fea8b826a1162db4e587e4a01f51007a9b54ac4805d97 WatchSource:0}: Error finding container 605ee638857fd7c4285fea8b826a1162db4e587e4a01f51007a9b54ac4805d97: Status 404 returned error can't find the container with id 605ee638857fd7c4285fea8b826a1162db4e587e4a01f51007a9b54ac4805d97 Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.064534 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" event={"ID":"7f918684-13af-4141-a414-9ac7b87e75d9","Type":"ContainerStarted","Data":"c279af1eaee6f6ee90896d6f9093eaa356396f6ac00b9d5148149e89e914f35d"} Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.065854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r"] Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.065903 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" event={"ID":"94489fb6-8195-4820-b4bf-87122803836a","Type":"ContainerStarted","Data":"49e03064e832c4466b8b8bdd203b9011ea3d6bdfdae2c13dd608b4edb8025862"} Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.067766 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:bef93f71d3b42a72d8b96c69bdb4db4b8bd797c5093a0a719443d7a5c9aaab55,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-whqhl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-884679f54-xvk2s_openstack-operators(3e11ecc5-9159-4144-a583-1b657f2349ea): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.067921 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode36b96c5_7bfb_4657_b69b_7eaeeff3b477.slice/crio-51fa4051eb1084169f9e6f020b0d27a731b085d240792ce124011ad31511d258 WatchSource:0}: Error finding container 51fa4051eb1084169f9e6f020b0d27a731b085d240792ce124011ad31511d258: Status 404 returned error can't find the container with id 51fa4051eb1084169f9e6f020b0d27a731b085d240792ce124011ad31511d258 Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.068861 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" podUID="3e11ecc5-9159-4144-a583-1b657f2349ea" Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.069332 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31acb64e_576e_4b51_a8a3_37162d9161c0.slice/crio-d906309940418572b00b83e16fe321f3d1fc018945f2c8784da33c35ab094348 WatchSource:0}: Error finding container d906309940418572b00b83e16fe321f3d1fc018945f2c8784da33c35ab094348: Status 404 returned error can't find the container with id d906309940418572b00b83e16fe321f3d1fc018945f2c8784da33c35ab094348 Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.069627 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" event={"ID":"829ecd99-4a08-4965-ab30-fb30ab8e2ead","Type":"ContainerStarted","Data":"dcaf57374391d58eca332e9d6c48c3af81b40c8e3a0f7bc0b666507f565af4a2"} Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.070876 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:425fd66675becbe0ca2b2fe1a5a6694ac6e0b1cdce9a77a7a37f99785eadc74a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j2cnr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-5b9f45d989-rncs2_openstack-operators(e36b96c5-7bfb-4657-b69b-7eaeeff3b477): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.071182 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" event={"ID":"a1747eb3-27f1-4f93-85c4-d786b8730bc9","Type":"ContainerStarted","Data":"b42edfa2c411f6c941920e0d135aa48bd568e96e7eb51f734df69e8a8dce618f"} Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.071263 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:c500fa7080b94105e85eeced772d8872e4168904e74ba02116e15ab66f522444,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vfpfd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-d6b694c5-2xj66_openstack-operators(31acb64e-576e-4b51-a8a3-37162d9161c0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.071445 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s"] Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.072031 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" podUID="e36b96c5-7bfb-4657-b69b-7eaeeff3b477" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.072441 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" podUID="31acb64e-576e-4b51-a8a3-37162d9161c0" Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.072474 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" event={"ID":"200b39b4-9995-48fc-a31f-1708526bd9d8","Type":"ContainerStarted","Data":"f72f9f406cefa1aa5a965aed93ffc69c0736c0191bdf1e8f2b320e23220f690e"} Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.074094 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" event={"ID":"6c1e7d53-7b72-4c38-bca1-94db6fd742d2","Type":"ContainerStarted","Data":"60595506b057b438bd41b7bb4a3f2be1d297e8aa52fc09f21b7559e9e3afc530"} Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.074981 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35420d07_3f39_47f7_bc13_d5fc95954674.slice/crio-b1b3ce1f5f4021140ea19568b7104610d026099377121a6189a5c6a3bdc5cbbd WatchSource:0}: Error finding container 
b1b3ce1f5f4021140ea19568b7104610d026099377121a6189a5c6a3bdc5cbbd: Status 404 returned error can't find the container with id b1b3ce1f5f4021140ea19568b7104610d026099377121a6189a5c6a3bdc5cbbd Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.075643 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" event={"ID":"be5e97ef-9ad5-4663-bbf1-69573e2eedeb","Type":"ContainerStarted","Data":"ef07bb788e0aae1a2aecf69ab03f09ed90c9dbe272b996780b3109f616bf24d0"} Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.076343 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66"] Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.076956 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" event={"ID":"aee070f5-c22b-4b69-b116-685316825aaa","Type":"ContainerStarted","Data":"1ff3c8effe3cd2a1ba60ff0293ef26d0992267dfa10983517025685478b65229"} Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.077730 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" event={"ID":"a314b9f5-f2d9-445c-bf5d-a42dc479c21d","Type":"ContainerStarted","Data":"9d9cfc86c7ff3751a9e154958452610b4a327f6bfb2efc592c526f60ec86f355"} Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.078882 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd09f9f03_5f5c_4b17_b4b7_81ddc051aef3.slice/crio-83eb3823dd76fc6b0e22781afd864a0b66ecba7ee0274097af801416c530e041 WatchSource:0}: Error finding container 83eb3823dd76fc6b0e22781afd864a0b66ecba7ee0274097af801416c530e041: Status 404 returned error can't find the container with id 83eb3823dd76fc6b0e22781afd864a0b66ecba7ee0274097af801416c530e041 Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.079505 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:ec36a9083657587022f8471c9d5a71b87a7895398496e7fc546c73aa1eae4b56,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hm6wj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-768b96df4c-kvw9r_openstack-operators(35420d07-3f39-47f7-bc13-d5fc95954674): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.080603 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" podUID="35420d07-3f39-47f7-bc13-d5fc95954674" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.080891 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pckw8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-jmwcp_openstack-operators(d09f9f03-5f5c-4b17-b4b7-81ddc051aef3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.084050 
4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" podUID="d09f9f03-5f5c-4b17-b4b7-81ddc051aef3" Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.189789 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5"] Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.206075 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h"] Mar 20 15:57:01 crc kubenswrapper[4813]: W0320 15:57:01.217120 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod758fbe07_6d40_4287_8b3b_e1c88bcb9665.slice/crio-ad2b3a46cda4da27969bc1a5fc869b281798f6d1e9a312c6af48957c688941c8 WatchSource:0}: Error finding container ad2b3a46cda4da27969bc1a5fc869b281798f6d1e9a312c6af48957c688941c8: Status 404 returned error can't find the container with id ad2b3a46cda4da27969bc1a5fc869b281798f6d1e9a312c6af48957c688941c8 Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.219575 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.147:5001/openstack-k8s-operators/watcher-operator:ee00c2d330b27d46c48ac29a20680b56ca50df3c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qqn8c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod watcher-operator-controller-manager-6c5858c67b-t8b5h_openstack-operators(758fbe07-6d40-4287-8b3b-e1c88bcb9665): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.220930 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.459491 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.461289 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.461369 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert podName:7a422362-8c7e-4943-ad13-ca4089978ef9 nodeName:}" failed. No retries permitted until 2026-03-20 15:57:03.461341549 +0000 UTC m=+1152.884044390 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert") pod "openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" (UID: "7a422362-8c7e-4943-ad13-ca4089978ef9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.663538 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:01 crc kubenswrapper[4813]: I0320 15:57:01.663667 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.663701 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.663774 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:03.663755895 +0000 UTC m=+1153.086458736 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "webhook-server-cert" not found Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.663880 4813 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 15:57:01 crc kubenswrapper[4813]: E0320 15:57:01.663959 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:03.66393712 +0000 UTC m=+1153.086640061 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "metrics-server-cert" not found Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.094574 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" event={"ID":"3e11ecc5-9159-4144-a583-1b657f2349ea","Type":"ContainerStarted","Data":"605ee638857fd7c4285fea8b826a1162db4e587e4a01f51007a9b54ac4805d97"} Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.096063 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:bef93f71d3b42a72d8b96c69bdb4db4b8bd797c5093a0a719443d7a5c9aaab55\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" podUID="3e11ecc5-9159-4144-a583-1b657f2349ea" Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.098649 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" event={"ID":"2ce265a6-3022-420a-99dc-0cead55c568f","Type":"ContainerStarted","Data":"88c6636125a19509f1bdceb5addbae7bc9dda579571c7eb1528eb11b7975fa31"} Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.100893 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" event={"ID":"1847c24e-09bc-44da-9343-5ae3f93c1dd1","Type":"ContainerStarted","Data":"c6fbcca1f98c5379d71416db1685da932b9bfbf6d96349424ca3777f35a82ee9"} Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.104403 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" event={"ID":"758fbe07-6d40-4287-8b3b-e1c88bcb9665","Type":"ContainerStarted","Data":"ad2b3a46cda4da27969bc1a5fc869b281798f6d1e9a312c6af48957c688941c8"} Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.108277 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.147:5001/openstack-k8s-operators/watcher-operator:ee00c2d330b27d46c48ac29a20680b56ca50df3c\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.109010 4813 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" event={"ID":"e36b96c5-7bfb-4657-b69b-7eaeeff3b477","Type":"ContainerStarted","Data":"51fa4051eb1084169f9e6f020b0d27a731b085d240792ce124011ad31511d258"} Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.110729 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:425fd66675becbe0ca2b2fe1a5a6694ac6e0b1cdce9a77a7a37f99785eadc74a\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" podUID="e36b96c5-7bfb-4657-b69b-7eaeeff3b477" Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.115047 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" event={"ID":"31acb64e-576e-4b51-a8a3-37162d9161c0","Type":"ContainerStarted","Data":"d906309940418572b00b83e16fe321f3d1fc018945f2c8784da33c35ab094348"} Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.116336 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:c500fa7080b94105e85eeced772d8872e4168904e74ba02116e15ab66f522444\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" podUID="31acb64e-576e-4b51-a8a3-37162d9161c0" Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.119881 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" event={"ID":"35420d07-3f39-47f7-bc13-d5fc95954674","Type":"ContainerStarted","Data":"b1b3ce1f5f4021140ea19568b7104610d026099377121a6189a5c6a3bdc5cbbd"} Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.121574 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:ec36a9083657587022f8471c9d5a71b87a7895398496e7fc546c73aa1eae4b56\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" podUID="35420d07-3f39-47f7-bc13-d5fc95954674" Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.127942 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" event={"ID":"d09f9f03-5f5c-4b17-b4b7-81ddc051aef3","Type":"ContainerStarted","Data":"83eb3823dd76fc6b0e22781afd864a0b66ecba7ee0274097af801416c530e041"} Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.130909 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" podUID="d09f9f03-5f5c-4b17-b4b7-81ddc051aef3" Mar 20 15:57:02 crc kubenswrapper[4813]: I0320 15:57:02.988246 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " 
pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.988433 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:02 crc kubenswrapper[4813]: E0320 15:57:02.988522 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert podName:bcb8373c-ae7c-4646-a90a-fe965f70c9bd nodeName:}" failed. No retries permitted until 2026-03-20 15:57:06.988502826 +0000 UTC m=+1156.411205667 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert") pod "infra-operator-controller-manager-7b9c774f96-qzktk" (UID: "bcb8373c-ae7c-4646-a90a-fe965f70c9bd") : secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.135791 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.147:5001/openstack-k8s-operators/watcher-operator:ee00c2d330b27d46c48ac29a20680b56ca50df3c\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.135893 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:bef93f71d3b42a72d8b96c69bdb4db4b8bd797c5093a0a719443d7a5c9aaab55\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" podUID="3e11ecc5-9159-4144-a583-1b657f2349ea" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.135930 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:ec36a9083657587022f8471c9d5a71b87a7895398496e7fc546c73aa1eae4b56\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" podUID="35420d07-3f39-47f7-bc13-d5fc95954674" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.136013 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:425fd66675becbe0ca2b2fe1a5a6694ac6e0b1cdce9a77a7a37f99785eadc74a\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" podUID="e36b96c5-7bfb-4657-b69b-7eaeeff3b477" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.136002 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:c500fa7080b94105e85eeced772d8872e4168904e74ba02116e15ab66f522444\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" podUID="31acb64e-576e-4b51-a8a3-37162d9161c0" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.136589 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" podUID="d09f9f03-5f5c-4b17-b4b7-81ddc051aef3" Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.496161 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.496357 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.496591 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert podName:7a422362-8c7e-4943-ad13-ca4089978ef9 nodeName:}" failed. No retries permitted until 2026-03-20 15:57:07.496570248 +0000 UTC m=+1156.919273089 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert") pod "openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" (UID: "7a422362-8c7e-4943-ad13-ca4089978ef9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.699974 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.700146 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.700188 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.700268 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:07.700250499 +0000 UTC m=+1157.122953340 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "webhook-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.700294 4813 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: E0320 15:57:03.700385 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:07.700363062 +0000 UTC m=+1157.123065943 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "metrics-server-cert" not found Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.842775 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.842839 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.842887 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.843673 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"58e6bc2a6d9001d676e2ad6e8e29a8d0759512d8590133534bf74186af5900d4"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 15:57:03 crc kubenswrapper[4813]: I0320 15:57:03.843736 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://58e6bc2a6d9001d676e2ad6e8e29a8d0759512d8590133534bf74186af5900d4" gracePeriod=600 Mar 20 15:57:04 crc kubenswrapper[4813]: I0320 15:57:04.158803 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="58e6bc2a6d9001d676e2ad6e8e29a8d0759512d8590133534bf74186af5900d4" exitCode=0 Mar 20 15:57:04 crc kubenswrapper[4813]: I0320 15:57:04.158840 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"58e6bc2a6d9001d676e2ad6e8e29a8d0759512d8590133534bf74186af5900d4"} Mar 20 15:57:04 crc 
kubenswrapper[4813]: I0320 15:57:04.158899 4813 scope.go:117] "RemoveContainer" containerID="c4e8d3a270bd3cde9b5259a1a03876e8e2c58d54ad599c8e659fb3e3d9e094f9" Mar 20 15:57:06 crc kubenswrapper[4813]: I0320 15:57:06.341965 4813 scope.go:117] "RemoveContainer" containerID="8ed9e6eb27f590d7f19fd05f14df351407957a09bb413dc5a49fb0678e8f9ec9" Mar 20 15:57:07 crc kubenswrapper[4813]: I0320 15:57:07.056308 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.056529 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.056813 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert podName:bcb8373c-ae7c-4646-a90a-fe965f70c9bd nodeName:}" failed. No retries permitted until 2026-03-20 15:57:15.056790475 +0000 UTC m=+1164.479493356 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert") pod "infra-operator-controller-manager-7b9c774f96-qzktk" (UID: "bcb8373c-ae7c-4646-a90a-fe965f70c9bd") : secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: I0320 15:57:07.563820 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.564043 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.564134 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert podName:7a422362-8c7e-4943-ad13-ca4089978ef9 nodeName:}" failed. No retries permitted until 2026-03-20 15:57:15.564111537 +0000 UTC m=+1164.986814388 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert") pod "openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" (UID: "7a422362-8c7e-4943-ad13-ca4089978ef9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: I0320 15:57:07.767047 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:07 crc kubenswrapper[4813]: I0320 15:57:07.767143 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.767270 4813 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.767344 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:15.767326465 +0000 UTC m=+1165.190029306 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "metrics-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.767361 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 15:57:07 crc kubenswrapper[4813]: E0320 15:57:07.767540 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:15.767516681 +0000 UTC m=+1165.190219532 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "webhook-server-cert" not found Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.224565 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" event={"ID":"be5e97ef-9ad5-4663-bbf1-69573e2eedeb","Type":"ContainerStarted","Data":"e1423ff817266049ee6a3b8cb96e69b728d7cdd127daffc57551bc8f0631ef21"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.225239 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.227646 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"957002a8822874ec45a5cbe2ca3717cafc8492693f474f6fa7fd364b2cfa8d50"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.229075 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" event={"ID":"a1747eb3-27f1-4f93-85c4-d786b8730bc9","Type":"ContainerStarted","Data":"24b2e27b86a594caad426304e54d10d8d8305ff86804f1bf880b4424643b8e83"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.229200 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.230473 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" event={"ID":"7c7640f5-78e7-4dba-8900-26dc47eb640f","Type":"ContainerStarted","Data":"2cdc0ac25799bec16d59f81abc52e633fdc09e5d6d7920bfae5d442a385d1d61"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.231077 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.232993 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" event={"ID":"6c1e7d53-7b72-4c38-bca1-94db6fd742d2","Type":"ContainerStarted","Data":"11b7798f8e595fca247773b038895d0487b98cc53d837246a6338f825ca10fa5"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.233106 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.234363 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" event={"ID":"94489fb6-8195-4820-b4bf-87122803836a","Type":"ContainerStarted","Data":"483e1a46e0d54f0e237fefc95bbab02104603fc6b273c24ed9b6e8a2ddea0aa9"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.234774 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.236102 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" event={"ID":"829ecd99-4a08-4965-ab30-fb30ab8e2ead","Type":"ContainerStarted","Data":"a3d0a7f7131f8729661e525c601196b9da1ba4eebf218ee8507d4368ad915021"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.236455 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.239104 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" event={"ID":"a314b9f5-f2d9-445c-bf5d-a42dc479c21d","Type":"ContainerStarted","Data":"98c9e48b05e0bf862ee39202168673ad44ec6e27d36433e0fec3decf2ff224a6"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.239562 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.240871 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" event={"ID":"1847c24e-09bc-44da-9343-5ae3f93c1dd1","Type":"ContainerStarted","Data":"118f888eece148fb04d51327d1c7b4133d218b4291924b903da5c68c13deeacc"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.241207 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.242805 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" event={"ID":"200b39b4-9995-48fc-a31f-1708526bd9d8","Type":"ContainerStarted","Data":"583c8f010a3118d14ca564d49fdc83e90fac3dd584e1f6ec3774125c489c059e"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.243173 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.244638 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" event={"ID":"2ce265a6-3022-420a-99dc-0cead55c568f","Type":"ContainerStarted","Data":"0be76de242d1bcd2e14e101d6fe00c03c081aba33c869b4ab0b73d6650b91fb2"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.245011 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.246343 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" event={"ID":"aee070f5-c22b-4b69-b116-685316825aaa","Type":"ContainerStarted","Data":"7ab12eee2ab8be46d26ee0d194ec35f6459dea855efe9be6f4c8bb8c5138fb7c"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.246669 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.248243 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" 
event={"ID":"6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe","Type":"ContainerStarted","Data":"cd682be290bb923ea89aad7d8f64b0671820463a0d6e50328ef4e1e4e82a5826"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.248631 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.250167 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" event={"ID":"f0d2c31f-5223-4222-afa8-ba918af23dca","Type":"ContainerStarted","Data":"fa64b1eabf3e78fc06836964b298e9af8906908adb72268e71c131ff94aa988b"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.250247 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.251899 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" event={"ID":"7f918684-13af-4141-a414-9ac7b87e75d9","Type":"ContainerStarted","Data":"409c5b9aebe47948eff2e9bdc553d2ed3d246ffbff22933e0c54e6b89c52b04c"} Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.252058 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.267514 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" podStartSLOduration=3.526411514 podStartE2EDuration="15.26749423s" podCreationTimestamp="2026-03-20 15:56:58 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.622267465 +0000 UTC m=+1150.044970306" lastFinishedPulling="2026-03-20 15:57:12.363350181 +0000 UTC m=+1161.786053022" observedRunningTime="2026-03-20 15:57:13.261009194 +0000 UTC m=+1162.683712035" watchObservedRunningTime="2026-03-20 15:57:13.26749423 +0000 UTC m=+1162.690197071" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.312679 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" podStartSLOduration=2.438138611 podStartE2EDuration="14.312661724s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.46641593 +0000 UTC m=+1149.889118771" lastFinishedPulling="2026-03-20 15:57:12.340939053 +0000 UTC m=+1161.763641884" observedRunningTime="2026-03-20 15:57:13.311166774 +0000 UTC m=+1162.733869615" watchObservedRunningTime="2026-03-20 15:57:13.312661724 +0000 UTC m=+1162.735364565" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.393724 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" podStartSLOduration=3.423674719 podStartE2EDuration="15.393701091s" podCreationTimestamp="2026-03-20 15:56:58 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.367659423 +0000 UTC m=+1149.790362264" lastFinishedPulling="2026-03-20 15:57:12.337685795 +0000 UTC m=+1161.760388636" observedRunningTime="2026-03-20 15:57:13.345590687 +0000 UTC m=+1162.768293528" watchObservedRunningTime="2026-03-20 15:57:13.393701091 +0000 UTC m=+1162.816403932" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.427550 4813 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" podStartSLOduration=3.291922604 podStartE2EDuration="14.427532928s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.207054286 +0000 UTC m=+1150.629757127" lastFinishedPulling="2026-03-20 15:57:12.34266461 +0000 UTC m=+1161.765367451" observedRunningTime="2026-03-20 15:57:13.396274891 +0000 UTC m=+1162.818977732" watchObservedRunningTime="2026-03-20 15:57:13.427532928 +0000 UTC m=+1162.850235769" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.430578 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" podStartSLOduration=3.112591153 podStartE2EDuration="14.430570301s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.020131799 +0000 UTC m=+1150.442834640" lastFinishedPulling="2026-03-20 15:57:12.338110937 +0000 UTC m=+1161.760813788" observedRunningTime="2026-03-20 15:57:13.426655094 +0000 UTC m=+1162.849357935" watchObservedRunningTime="2026-03-20 15:57:13.430570301 +0000 UTC m=+1162.853273142" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.452138 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" podStartSLOduration=3.134494417 podStartE2EDuration="14.452121075s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.062639021 +0000 UTC m=+1150.485341862" lastFinishedPulling="2026-03-20 15:57:12.380265679 +0000 UTC m=+1161.802968520" observedRunningTime="2026-03-20 15:57:13.449121063 +0000 UTC m=+1162.871823904" watchObservedRunningTime="2026-03-20 15:57:13.452121075 +0000 UTC m=+1162.874823916" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.490564 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" podStartSLOduration=2.791333424 podStartE2EDuration="14.490550586s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.621991387 +0000 UTC m=+1150.044694218" lastFinishedPulling="2026-03-20 15:57:12.321208539 +0000 UTC m=+1161.743911380" observedRunningTime="2026-03-20 15:57:13.486317042 +0000 UTC m=+1162.909019883" watchObservedRunningTime="2026-03-20 15:57:13.490550586 +0000 UTC m=+1162.913253427" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.517215 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" podStartSLOduration=3.544246357 podStartE2EDuration="15.517196309s" podCreationTimestamp="2026-03-20 15:56:58 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.367701314 +0000 UTC m=+1149.790404145" lastFinishedPulling="2026-03-20 15:57:12.340651266 +0000 UTC m=+1161.763354097" observedRunningTime="2026-03-20 15:57:13.514139236 +0000 UTC m=+1162.936842077" watchObservedRunningTime="2026-03-20 15:57:13.517196309 +0000 UTC m=+1162.939899150" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.533076 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" podStartSLOduration=3.030264182 podStartE2EDuration="14.533056269s" podCreationTimestamp="2026-03-20 15:56:59 
+0000 UTC" firstStartedPulling="2026-03-20 15:57:00.837881659 +0000 UTC m=+1150.260584500" lastFinishedPulling="2026-03-20 15:57:12.340673716 +0000 UTC m=+1161.763376587" observedRunningTime="2026-03-20 15:57:13.530595672 +0000 UTC m=+1162.953298513" watchObservedRunningTime="2026-03-20 15:57:13.533056269 +0000 UTC m=+1162.955759110" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.575554 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" podStartSLOduration=3.147316784 podStartE2EDuration="14.57552895s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.892710885 +0000 UTC m=+1150.315413726" lastFinishedPulling="2026-03-20 15:57:12.320923051 +0000 UTC m=+1161.743625892" observedRunningTime="2026-03-20 15:57:13.562425245 +0000 UTC m=+1162.985128086" watchObservedRunningTime="2026-03-20 15:57:13.57552895 +0000 UTC m=+1162.998231801" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.582958 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" podStartSLOduration=3.869876745 podStartE2EDuration="15.582939721s" podCreationTimestamp="2026-03-20 15:56:58 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.607729231 +0000 UTC m=+1150.030432072" lastFinishedPulling="2026-03-20 15:57:12.320792207 +0000 UTC m=+1161.743495048" observedRunningTime="2026-03-20 15:57:13.582931781 +0000 UTC m=+1163.005634622" watchObservedRunningTime="2026-03-20 15:57:13.582939721 +0000 UTC m=+1163.005642562" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.617819 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" podStartSLOduration=3.755535465 podStartE2EDuration="15.617773195s" podCreationTimestamp="2026-03-20 15:56:58 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.367992422 +0000 UTC m=+1149.790695263" lastFinishedPulling="2026-03-20 15:57:12.230230152 +0000 UTC m=+1161.652932993" observedRunningTime="2026-03-20 15:57:13.613382186 +0000 UTC m=+1163.036085027" watchObservedRunningTime="2026-03-20 15:57:13.617773195 +0000 UTC m=+1163.040476036" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.630024 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" podStartSLOduration=3.130835278 podStartE2EDuration="14.630005287s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.84233049 +0000 UTC m=+1150.265033331" lastFinishedPulling="2026-03-20 15:57:12.341500499 +0000 UTC m=+1161.764203340" observedRunningTime="2026-03-20 15:57:13.627141719 +0000 UTC m=+1163.049844560" watchObservedRunningTime="2026-03-20 15:57:13.630005287 +0000 UTC m=+1163.052708118" Mar 20 15:57:13 crc kubenswrapper[4813]: I0320 15:57:13.656611 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" podStartSLOduration=3.956748659 podStartE2EDuration="15.656592597s" podCreationTimestamp="2026-03-20 15:56:58 +0000 UTC" firstStartedPulling="2026-03-20 15:57:00.620141047 +0000 UTC m=+1150.042843888" lastFinishedPulling="2026-03-20 15:57:12.319984985 +0000 UTC m=+1161.742687826" observedRunningTime="2026-03-20 15:57:13.652972269 +0000 UTC m=+1163.075675110" 
watchObservedRunningTime="2026-03-20 15:57:13.656592597 +0000 UTC m=+1163.079295438" Mar 20 15:57:15 crc kubenswrapper[4813]: I0320 15:57:15.083808 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.084203 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.084249 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert podName:bcb8373c-ae7c-4646-a90a-fe965f70c9bd nodeName:}" failed. No retries permitted until 2026-03-20 15:57:31.084234917 +0000 UTC m=+1180.506937758 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert") pod "infra-operator-controller-manager-7b9c774f96-qzktk" (UID: "bcb8373c-ae7c-4646-a90a-fe965f70c9bd") : secret "infra-operator-webhook-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: I0320 15:57:15.592888 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.593031 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.593357 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert podName:7a422362-8c7e-4943-ad13-ca4089978ef9 nodeName:}" failed. No retries permitted until 2026-03-20 15:57:31.593338946 +0000 UTC m=+1181.016041787 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert") pod "openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" (UID: "7a422362-8c7e-4943-ad13-ca4089978ef9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.796400 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: I0320 15:57:15.796405 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.796728 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:31.796708669 +0000 UTC m=+1181.219411520 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "webhook-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: I0320 15:57:15.796998 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.797117 4813 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 15:57:15 crc kubenswrapper[4813]: E0320 15:57:15.797190 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs podName:94bef8a0-0916-46e0-9253-8130ef2367da nodeName:}" failed. No retries permitted until 2026-03-20 15:57:31.797170942 +0000 UTC m=+1181.219873853 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs") pod "openstack-operator-controller-manager-6f58c59cbb-66v95" (UID: "94bef8a0-0916-46e0-9253-8130ef2367da") : secret "metrics-server-cert" not found Mar 20 15:57:16 crc kubenswrapper[4813]: I0320 15:57:16.282197 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" event={"ID":"758fbe07-6d40-4287-8b3b-e1c88bcb9665","Type":"ContainerStarted","Data":"9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572"} Mar 20 15:57:16 crc kubenswrapper[4813]: I0320 15:57:16.283470 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:16 crc kubenswrapper[4813]: I0320 15:57:16.306110 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" podStartSLOduration=2.658073421 podStartE2EDuration="17.306092967s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.219438071 +0000 UTC m=+1150.642140912" lastFinishedPulling="2026-03-20 15:57:15.867457607 +0000 UTC m=+1165.290160458" observedRunningTime="2026-03-20 15:57:16.298205093 +0000 UTC m=+1165.720907934" watchObservedRunningTime="2026-03-20 15:57:16.306092967 +0000 UTC m=+1165.728795808" Mar 20 15:57:18 crc kubenswrapper[4813]: I0320 15:57:18.306990 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" event={"ID":"e36b96c5-7bfb-4657-b69b-7eaeeff3b477","Type":"ContainerStarted","Data":"e9365d9d29240656631c634532a2e32674540e35086cb496a76663ba7f2646f1"} Mar 20 15:57:18 crc kubenswrapper[4813]: I0320 15:57:18.308334 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:57:18 crc kubenswrapper[4813]: I0320 15:57:18.328124 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" podStartSLOduration=3.243791449 podStartE2EDuration="19.328106359s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.070758371 +0000 UTC m=+1150.493461212" lastFinishedPulling="2026-03-20 15:57:17.155073281 +0000 UTC m=+1166.577776122" observedRunningTime="2026-03-20 15:57:18.326594518 +0000 UTC m=+1167.749297349" watchObservedRunningTime="2026-03-20 15:57:18.328106359 +0000 UTC m=+1167.750809200" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.276991 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-2l2s5" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.334244 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-lzq5s" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.602962 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-q88gq" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.603039 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-cff9z" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.691634 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-z8zt6" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.725434 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-mqm85" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.745507 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-v8t7w" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.810739 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-r7v2l" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.849824 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-55f864c847-6xnn6" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.858837 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-vj727" Mar 20 15:57:19 crc kubenswrapper[4813]: I0320 15:57:19.877572 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-767865f676-fstk4" Mar 20 15:57:20 crc kubenswrapper[4813]: I0320 15:57:20.037705 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5784578c99-gpxm6" Mar 20 15:57:20 crc kubenswrapper[4813]: I0320 15:57:20.128193 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-c674c5965-nqn84" Mar 20 15:57:20 crc kubenswrapper[4813]: I0320 15:57:20.205342 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-5rxh5" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.376569 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" event={"ID":"31acb64e-576e-4b51-a8a3-37162d9161c0","Type":"ContainerStarted","Data":"c37c98889aa913a7b4397d69615167fd823c0b43bbcc29ef7a8b65e9f932e13f"} Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.377372 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.378681 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" event={"ID":"35420d07-3f39-47f7-bc13-d5fc95954674","Type":"ContainerStarted","Data":"2914fe746dbaa7006f2fbeeeb7a1640d8fcf6e3cab942bc746c53cb66fdf48d0"} Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.378882 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.380072 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" 
event={"ID":"d09f9f03-5f5c-4b17-b4b7-81ddc051aef3","Type":"ContainerStarted","Data":"b1cc0c5efe0bc28218ea1347a197a843d856ddd25d5200784f3fc79f83472b6f"} Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.381431 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" event={"ID":"3e11ecc5-9159-4144-a583-1b657f2349ea","Type":"ContainerStarted","Data":"81e14db880b45e585ccc35e75303933901662638d2898c567c1a73c166657c11"} Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.381659 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.391627 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" podStartSLOduration=3.313111888 podStartE2EDuration="26.391608639s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.071120631 +0000 UTC m=+1150.493823462" lastFinishedPulling="2026-03-20 15:57:24.149617362 +0000 UTC m=+1173.572320213" observedRunningTime="2026-03-20 15:57:25.389734928 +0000 UTC m=+1174.812437769" watchObservedRunningTime="2026-03-20 15:57:25.391608639 +0000 UTC m=+1174.814311480" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.415595 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" podStartSLOduration=3.344998173 podStartE2EDuration="26.415576469s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.079337874 +0000 UTC m=+1150.502040715" lastFinishedPulling="2026-03-20 15:57:24.14991617 +0000 UTC m=+1173.572619011" observedRunningTime="2026-03-20 15:57:25.412734272 +0000 UTC m=+1174.835437113" watchObservedRunningTime="2026-03-20 15:57:25.415576469 +0000 UTC m=+1174.838279310" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.437828 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" podStartSLOduration=3.356177275 podStartE2EDuration="26.437810041s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.067655067 +0000 UTC m=+1150.490357908" lastFinishedPulling="2026-03-20 15:57:24.149287833 +0000 UTC m=+1173.571990674" observedRunningTime="2026-03-20 15:57:25.432562179 +0000 UTC m=+1174.855265020" watchObservedRunningTime="2026-03-20 15:57:25.437810041 +0000 UTC m=+1174.860512872" Mar 20 15:57:25 crc kubenswrapper[4813]: I0320 15:57:25.452225 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jmwcp" podStartSLOduration=3.347180801 podStartE2EDuration="26.452204291s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:01.080786753 +0000 UTC m=+1150.503489594" lastFinishedPulling="2026-03-20 15:57:24.185810243 +0000 UTC m=+1173.608513084" observedRunningTime="2026-03-20 15:57:25.451873763 +0000 UTC m=+1174.874576604" watchObservedRunningTime="2026-03-20 15:57:25.452204291 +0000 UTC m=+1174.874907132" Mar 20 15:57:29 crc kubenswrapper[4813]: I0320 15:57:29.833623 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-kvw9r" Mar 20 15:57:30 crc kubenswrapper[4813]: I0320 15:57:29.905046 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-rncs2" Mar 20 15:57:30 crc kubenswrapper[4813]: I0320 15:57:29.970182 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-884679f54-xvk2s" Mar 20 15:57:30 crc kubenswrapper[4813]: I0320 15:57:30.146740 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-2xj66" Mar 20 15:57:30 crc kubenswrapper[4813]: I0320 15:57:30.249001 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.141665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.150252 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bcb8373c-ae7c-4646-a90a-fe965f70c9bd-cert\") pod \"infra-operator-controller-manager-7b9c774f96-qzktk\" (UID: \"bcb8373c-ae7c-4646-a90a-fe965f70c9bd\") " pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.287248 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.648600 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.656398 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7a422362-8c7e-4943-ad13-ca4089978ef9-cert\") pod \"openstack-baremetal-operator-controller-manager-89d64c458-ntg7z\" (UID: \"7a422362-8c7e-4943-ad13-ca4089978ef9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.750313 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk"] Mar 20 15:57:31 crc kubenswrapper[4813]: W0320 15:57:31.761162 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcb8373c_ae7c_4646_a90a_fe965f70c9bd.slice/crio-8014836d72e49a5104a108fd4ea61912f5f21bbd266072aa1e221740e1612b1b WatchSource:0}: Error finding container 8014836d72e49a5104a108fd4ea61912f5f21bbd266072aa1e221740e1612b1b: Status 404 returned error can't find the container with id 8014836d72e49a5104a108fd4ea61912f5f21bbd266072aa1e221740e1612b1b Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.852085 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.852248 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.855775 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-webhook-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.856160 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/94bef8a0-0916-46e0-9253-8130ef2367da-metrics-certs\") pod \"openstack-operator-controller-manager-6f58c59cbb-66v95\" (UID: \"94bef8a0-0916-46e0-9253-8130ef2367da\") " pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:31 crc kubenswrapper[4813]: I0320 15:57:31.877412 
4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:32 crc kubenswrapper[4813]: I0320 15:57:32.060813 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:32 crc kubenswrapper[4813]: I0320 15:57:32.325222 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z"] Mar 20 15:57:32 crc kubenswrapper[4813]: W0320 15:57:32.326734 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a422362_8c7e_4943_ad13_ca4089978ef9.slice/crio-e98639497445a5c22ac77d3266af8bd0525f22133f52647b0b6f611dfaf751a5 WatchSource:0}: Error finding container e98639497445a5c22ac77d3266af8bd0525f22133f52647b0b6f611dfaf751a5: Status 404 returned error can't find the container with id e98639497445a5c22ac77d3266af8bd0525f22133f52647b0b6f611dfaf751a5 Mar 20 15:57:32 crc kubenswrapper[4813]: I0320 15:57:32.431882 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" event={"ID":"7a422362-8c7e-4943-ad13-ca4089978ef9","Type":"ContainerStarted","Data":"e98639497445a5c22ac77d3266af8bd0525f22133f52647b0b6f611dfaf751a5"} Mar 20 15:57:32 crc kubenswrapper[4813]: I0320 15:57:32.433070 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" event={"ID":"bcb8373c-ae7c-4646-a90a-fe965f70c9bd","Type":"ContainerStarted","Data":"8014836d72e49a5104a108fd4ea61912f5f21bbd266072aa1e221740e1612b1b"} Mar 20 15:57:32 crc kubenswrapper[4813]: I0320 15:57:32.561850 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95"] Mar 20 15:57:33 crc kubenswrapper[4813]: I0320 15:57:33.442445 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" event={"ID":"94bef8a0-0916-46e0-9253-8130ef2367da","Type":"ContainerStarted","Data":"cb62c43c447e6b5ce1abec78542c550a14efbd9cae15e9a4186792d48bba1a48"} Mar 20 15:57:37 crc kubenswrapper[4813]: I0320 15:57:37.478405 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" event={"ID":"94bef8a0-0916-46e0-9253-8130ef2367da","Type":"ContainerStarted","Data":"c643c0651139042eac3ddeb02c1b65ae7684d085d4ae879120a1d3afd414b76e"} Mar 20 15:57:38 crc kubenswrapper[4813]: I0320 15:57:38.487550 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:38 crc kubenswrapper[4813]: I0320 15:57:38.543903 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" podStartSLOduration=39.543884117 podStartE2EDuration="39.543884117s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:57:38.540804184 +0000 UTC m=+1187.963507025" watchObservedRunningTime="2026-03-20 15:57:38.543884117 +0000 UTC 
m=+1187.966586958" Mar 20 15:57:40 crc kubenswrapper[4813]: I0320 15:57:40.501116 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" event={"ID":"7a422362-8c7e-4943-ad13-ca4089978ef9","Type":"ContainerStarted","Data":"900edbcc6f614253d7252c84d5bf978e46b541926753a2083508b50535cbea44"} Mar 20 15:57:40 crc kubenswrapper[4813]: I0320 15:57:40.501660 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:40 crc kubenswrapper[4813]: I0320 15:57:40.502606 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" event={"ID":"bcb8373c-ae7c-4646-a90a-fe965f70c9bd","Type":"ContainerStarted","Data":"8d4ebfebfe9375978e1d8afc67946a4b40ceb80eaa9249c34a494c53fab3395c"} Mar 20 15:57:40 crc kubenswrapper[4813]: I0320 15:57:40.502843 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:40 crc kubenswrapper[4813]: I0320 15:57:40.551416 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" podStartSLOduration=34.124614096 podStartE2EDuration="41.551389884s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:32.329458713 +0000 UTC m=+1181.752161554" lastFinishedPulling="2026-03-20 15:57:39.756234501 +0000 UTC m=+1189.178937342" observedRunningTime="2026-03-20 15:57:40.520925629 +0000 UTC m=+1189.943628480" watchObservedRunningTime="2026-03-20 15:57:40.551389884 +0000 UTC m=+1189.974092725" Mar 20 15:57:40 crc kubenswrapper[4813]: I0320 15:57:40.552707 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" podStartSLOduration=33.6034658 podStartE2EDuration="41.55269956s" podCreationTimestamp="2026-03-20 15:56:59 +0000 UTC" firstStartedPulling="2026-03-20 15:57:31.764044956 +0000 UTC m=+1181.186747797" lastFinishedPulling="2026-03-20 15:57:39.713278716 +0000 UTC m=+1189.135981557" observedRunningTime="2026-03-20 15:57:40.548525107 +0000 UTC m=+1189.971227948" watchObservedRunningTime="2026-03-20 15:57:40.55269956 +0000 UTC m=+1189.975402401" Mar 20 15:57:42 crc kubenswrapper[4813]: I0320 15:57:42.068158 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6f58c59cbb-66v95" Mar 20 15:57:51 crc kubenswrapper[4813]: I0320 15:57:51.295311 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7b9c774f96-qzktk" Mar 20 15:57:51 crc kubenswrapper[4813]: I0320 15:57:51.884692 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-89d64c458-ntg7z" Mar 20 15:57:56 crc kubenswrapper[4813]: I0320 15:57:56.410033 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h"] Mar 20 15:57:56 crc kubenswrapper[4813]: I0320 15:57:56.410750 4813 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" containerName="manager" containerID="cri-o://9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572" gracePeriod=10 Mar 20 15:57:56 crc kubenswrapper[4813]: I0320 15:57:56.687812 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-646f48576b-bmc92"] Mar 20 15:57:56 crc kubenswrapper[4813]: I0320 15:57:56.688017 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" podUID="eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" containerName="operator" containerID="cri-o://a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c" gracePeriod=10 Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.298907 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.437874 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqn8c\" (UniqueName: \"kubernetes.io/projected/758fbe07-6d40-4287-8b3b-e1c88bcb9665-kube-api-access-qqn8c\") pod \"758fbe07-6d40-4287-8b3b-e1c88bcb9665\" (UID: \"758fbe07-6d40-4287-8b3b-e1c88bcb9665\") " Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.444177 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/758fbe07-6d40-4287-8b3b-e1c88bcb9665-kube-api-access-qqn8c" (OuterVolumeSpecName: "kube-api-access-qqn8c") pod "758fbe07-6d40-4287-8b3b-e1c88bcb9665" (UID: "758fbe07-6d40-4287-8b3b-e1c88bcb9665"). InnerVolumeSpecName "kube-api-access-qqn8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.507274 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.539320 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqn8c\" (UniqueName: \"kubernetes.io/projected/758fbe07-6d40-4287-8b3b-e1c88bcb9665-kube-api-access-qqn8c\") on node \"crc\" DevicePath \"\"" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.640444 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whldn\" (UniqueName: \"kubernetes.io/projected/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8-kube-api-access-whldn\") pod \"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8\" (UID: \"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8\") " Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.644508 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8-kube-api-access-whldn" (OuterVolumeSpecName: "kube-api-access-whldn") pod "eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" (UID: "eee8c4ad-1041-4c34-8c8f-b2feb75fbee8"). InnerVolumeSpecName "kube-api-access-whldn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.650428 4813 generic.go:334] "Generic (PLEG): container finished" podID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" containerID="9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572" exitCode=0 Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.650489 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" event={"ID":"758fbe07-6d40-4287-8b3b-e1c88bcb9665","Type":"ContainerDied","Data":"9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572"} Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.650537 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.650773 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h" event={"ID":"758fbe07-6d40-4287-8b3b-e1c88bcb9665","Type":"ContainerDied","Data":"ad2b3a46cda4da27969bc1a5fc869b281798f6d1e9a312c6af48957c688941c8"} Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.650870 4813 scope.go:117] "RemoveContainer" containerID="9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.652316 4813 generic.go:334] "Generic (PLEG): container finished" podID="eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" containerID="a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c" exitCode=0 Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.652349 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.652369 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" event={"ID":"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8","Type":"ContainerDied","Data":"a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c"} Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.652410 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-646f48576b-bmc92" event={"ID":"eee8c4ad-1041-4c34-8c8f-b2feb75fbee8","Type":"ContainerDied","Data":"363b202f4726359de8aa427a8a5160becb20bea1906e4f85296980a0ef070225"} Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.689374 4813 scope.go:117] "RemoveContainer" containerID="9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572" Mar 20 15:57:57 crc kubenswrapper[4813]: E0320 15:57:57.689834 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572\": container with ID starting with 9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572 not found: ID does not exist" containerID="9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.689877 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572"} err="failed to get container status \"9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572\": rpc error: code = NotFound desc = could not find container \"9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572\": container with ID starting with 9837a96f06f82d434c40a5fdb497294527b3ec36238e00f76009cdb5cf52b572 not found: ID does not exist" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.689903 4813 scope.go:117] "RemoveContainer" containerID="a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.698236 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h"] Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.705919 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c5858c67b-t8b5h"] Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.716530 4813 scope.go:117] "RemoveContainer" containerID="a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c" Mar 20 15:57:57 crc kubenswrapper[4813]: E0320 15:57:57.716890 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c\": container with ID starting with a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c not found: ID does not exist" containerID="a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.716921 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c"} err="failed to get container status 
\"a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c\": rpc error: code = NotFound desc = could not find container \"a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c\": container with ID starting with a429e7734c6a0edd6a14c61fe6a01784b2d896336d7c3daef94240bc8a98571c not found: ID does not exist" Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.718673 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-646f48576b-bmc92"] Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.724042 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-init-646f48576b-bmc92"] Mar 20 15:57:57 crc kubenswrapper[4813]: I0320 15:57:57.742866 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whldn\" (UniqueName: \"kubernetes.io/projected/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8-kube-api-access-whldn\") on node \"crc\" DevicePath \"\"" Mar 20 15:57:59 crc kubenswrapper[4813]: I0320 15:57:59.285201 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" path="/var/lib/kubelet/pods/758fbe07-6d40-4287-8b3b-e1c88bcb9665/volumes" Mar 20 15:57:59 crc kubenswrapper[4813]: I0320 15:57:59.287959 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" path="/var/lib/kubelet/pods/eee8c4ad-1041-4c34-8c8f-b2feb75fbee8/volumes" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.143156 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567038-9tndc"] Mar 20 15:58:00 crc kubenswrapper[4813]: E0320 15:58:00.143789 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" containerName="manager" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.143896 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" containerName="manager" Mar 20 15:58:00 crc kubenswrapper[4813]: E0320 15:58:00.143989 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" containerName="operator" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.144061 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" containerName="operator" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.144253 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="758fbe07-6d40-4287-8b3b-e1c88bcb9665" containerName="manager" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.144347 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="eee8c4ad-1041-4c34-8c8f-b2feb75fbee8" containerName="operator" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.144950 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.147531 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.148035 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.148089 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.154626 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567038-9tndc"] Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.280931 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6bbn\" (UniqueName: \"kubernetes.io/projected/dffeaf94-f594-4a3e-8672-fbcf49398f05-kube-api-access-d6bbn\") pod \"auto-csr-approver-29567038-9tndc\" (UID: \"dffeaf94-f594-4a3e-8672-fbcf49398f05\") " pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.382682 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6bbn\" (UniqueName: \"kubernetes.io/projected/dffeaf94-f594-4a3e-8672-fbcf49398f05-kube-api-access-d6bbn\") pod \"auto-csr-approver-29567038-9tndc\" (UID: \"dffeaf94-f594-4a3e-8672-fbcf49398f05\") " pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.403977 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6bbn\" (UniqueName: \"kubernetes.io/projected/dffeaf94-f594-4a3e-8672-fbcf49398f05-kube-api-access-d6bbn\") pod \"auto-csr-approver-29567038-9tndc\" (UID: \"dffeaf94-f594-4a3e-8672-fbcf49398f05\") " pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.464740 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:00 crc kubenswrapper[4813]: I0320 15:58:00.765262 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567038-9tndc"] Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.151906 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-index-5c49m"] Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.153049 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.155228 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-index-dockercfg-f8wcc" Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.166344 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-5c49m"] Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.301697 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbjlj\" (UniqueName: \"kubernetes.io/projected/a75a77fe-87c9-4704-8698-f6b526264fea-kube-api-access-cbjlj\") pod \"watcher-operator-index-5c49m\" (UID: \"a75a77fe-87c9-4704-8698-f6b526264fea\") " pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.403305 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbjlj\" (UniqueName: \"kubernetes.io/projected/a75a77fe-87c9-4704-8698-f6b526264fea-kube-api-access-cbjlj\") pod \"watcher-operator-index-5c49m\" (UID: \"a75a77fe-87c9-4704-8698-f6b526264fea\") " pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.434609 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbjlj\" (UniqueName: \"kubernetes.io/projected/a75a77fe-87c9-4704-8698-f6b526264fea-kube-api-access-cbjlj\") pod \"watcher-operator-index-5c49m\" (UID: \"a75a77fe-87c9-4704-8698-f6b526264fea\") " pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.473698 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.702173 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567038-9tndc" event={"ID":"dffeaf94-f594-4a3e-8672-fbcf49398f05","Type":"ContainerStarted","Data":"00ed2956aab0c56f57ba74b9a701211fb91b179275f7cc00923a7666617e6d3f"} Mar 20 15:58:01 crc kubenswrapper[4813]: I0320 15:58:01.706396 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-5c49m"] Mar 20 15:58:02 crc kubenswrapper[4813]: I0320 15:58:02.710702 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-5c49m" event={"ID":"a75a77fe-87c9-4704-8698-f6b526264fea","Type":"ContainerStarted","Data":"cfeeb9f3e39d50a7ac9f716b2ac55caf8ae1220804ff5951f2dbb36f0307c8b3"} Mar 20 15:58:02 crc kubenswrapper[4813]: I0320 15:58:02.711303 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-5c49m" event={"ID":"a75a77fe-87c9-4704-8698-f6b526264fea","Type":"ContainerStarted","Data":"064ab38fc87e8241c844e732042e9761fa605dc26bd84f653c97c307c8a162f3"} Mar 20 15:58:02 crc kubenswrapper[4813]: I0320 15:58:02.712817 4813 generic.go:334] "Generic (PLEG): container finished" podID="dffeaf94-f594-4a3e-8672-fbcf49398f05" containerID="6186cce06bed4cf3ee6e4df1bd3da424b20239dd1ddf2135eadbf98430890c0e" exitCode=0 Mar 20 15:58:02 crc kubenswrapper[4813]: I0320 15:58:02.712860 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567038-9tndc" event={"ID":"dffeaf94-f594-4a3e-8672-fbcf49398f05","Type":"ContainerDied","Data":"6186cce06bed4cf3ee6e4df1bd3da424b20239dd1ddf2135eadbf98430890c0e"} Mar 20 15:58:02 crc kubenswrapper[4813]: I0320 15:58:02.724111 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-index-5c49m" podStartSLOduration=1.552588592 podStartE2EDuration="1.72409314s" podCreationTimestamp="2026-03-20 15:58:01 +0000 UTC" firstStartedPulling="2026-03-20 15:58:01.716163009 +0000 UTC m=+1211.138865850" lastFinishedPulling="2026-03-20 15:58:01.887667557 +0000 UTC m=+1211.310370398" observedRunningTime="2026-03-20 15:58:02.721519671 +0000 UTC m=+1212.144222512" watchObservedRunningTime="2026-03-20 15:58:02.72409314 +0000 UTC m=+1212.146795981" Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.003804 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.141718 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6bbn\" (UniqueName: \"kubernetes.io/projected/dffeaf94-f594-4a3e-8672-fbcf49398f05-kube-api-access-d6bbn\") pod \"dffeaf94-f594-4a3e-8672-fbcf49398f05\" (UID: \"dffeaf94-f594-4a3e-8672-fbcf49398f05\") " Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.149353 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dffeaf94-f594-4a3e-8672-fbcf49398f05-kube-api-access-d6bbn" (OuterVolumeSpecName: "kube-api-access-d6bbn") pod "dffeaf94-f594-4a3e-8672-fbcf49398f05" (UID: "dffeaf94-f594-4a3e-8672-fbcf49398f05"). InnerVolumeSpecName "kube-api-access-d6bbn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.243525 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6bbn\" (UniqueName: \"kubernetes.io/projected/dffeaf94-f594-4a3e-8672-fbcf49398f05-kube-api-access-d6bbn\") on node \"crc\" DevicePath \"\"" Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.728716 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567038-9tndc" event={"ID":"dffeaf94-f594-4a3e-8672-fbcf49398f05","Type":"ContainerDied","Data":"00ed2956aab0c56f57ba74b9a701211fb91b179275f7cc00923a7666617e6d3f"} Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.728755 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00ed2956aab0c56f57ba74b9a701211fb91b179275f7cc00923a7666617e6d3f" Mar 20 15:58:04 crc kubenswrapper[4813]: I0320 15:58:04.728835 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567038-9tndc" Mar 20 15:58:05 crc kubenswrapper[4813]: I0320 15:58:05.068035 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567032-rhfbl"] Mar 20 15:58:05 crc kubenswrapper[4813]: I0320 15:58:05.075316 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567032-rhfbl"] Mar 20 15:58:05 crc kubenswrapper[4813]: I0320 15:58:05.279883 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="258cc7b6-bd0b-4452-911e-7b8091b7c9f0" path="/var/lib/kubelet/pods/258cc7b6-bd0b-4452-911e-7b8091b7c9f0/volumes" Mar 20 15:58:11 crc kubenswrapper[4813]: I0320 15:58:11.474676 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:11 crc kubenswrapper[4813]: I0320 15:58:11.475575 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:11 crc kubenswrapper[4813]: I0320 15:58:11.526944 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:11 crc kubenswrapper[4813]: I0320 15:58:11.832375 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-index-5c49m" Mar 20 15:58:12 crc kubenswrapper[4813]: I0320 15:58:12.345190 4813 scope.go:117] "RemoveContainer" containerID="b2b727a421c9f3999dd473266cdad1a4537e7af623db0ba1517fb3c71d854c3f" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.174824 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2"] Mar 20 15:58:13 crc kubenswrapper[4813]: E0320 15:58:13.175115 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffeaf94-f594-4a3e-8672-fbcf49398f05" containerName="oc" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.175128 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffeaf94-f594-4a3e-8672-fbcf49398f05" containerName="oc" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.175270 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dffeaf94-f594-4a3e-8672-fbcf49398f05" containerName="oc" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.176258 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.178026 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-vzwcd" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.190030 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2"] Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.288503 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-bundle\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.288559 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-util\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.288646 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr4hl\" (UniqueName: \"kubernetes.io/projected/bfe1f060-6370-4060-8b43-4e6e95c1f018-kube-api-access-jr4hl\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.389548 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr4hl\" (UniqueName: \"kubernetes.io/projected/bfe1f060-6370-4060-8b43-4e6e95c1f018-kube-api-access-jr4hl\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.389795 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-bundle\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.389841 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-util\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.391432 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-util\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.391861 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-bundle\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.415713 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr4hl\" (UniqueName: \"kubernetes.io/projected/bfe1f060-6370-4060-8b43-4e6e95c1f018-kube-api-access-jr4hl\") pod \"8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.503609 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:13 crc kubenswrapper[4813]: I0320 15:58:13.939405 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2"] Mar 20 15:58:14 crc kubenswrapper[4813]: I0320 15:58:14.806211 4813 generic.go:334] "Generic (PLEG): container finished" podID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerID="fa208033edc179b3b1df285adcef0c073563662a59cf9be48f87fa21290a6dd6" exitCode=0 Mar 20 15:58:14 crc kubenswrapper[4813]: I0320 15:58:14.806345 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" event={"ID":"bfe1f060-6370-4060-8b43-4e6e95c1f018","Type":"ContainerDied","Data":"fa208033edc179b3b1df285adcef0c073563662a59cf9be48f87fa21290a6dd6"} Mar 20 15:58:14 crc kubenswrapper[4813]: I0320 15:58:14.806508 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" event={"ID":"bfe1f060-6370-4060-8b43-4e6e95c1f018","Type":"ContainerStarted","Data":"a9053909978f981c954784f45e9e405fb3c0b48b321c611ee8c4c7150f2b90b8"} Mar 20 15:58:14 crc kubenswrapper[4813]: I0320 15:58:14.808084 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 15:58:15 crc kubenswrapper[4813]: I0320 15:58:15.813783 4813 generic.go:334] "Generic (PLEG): container finished" podID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerID="d3f34ab8aa9964dbec5da7b928dd5a3145bb06f6aeb3cbcbba942fd5a89b1ee1" exitCode=0 Mar 20 15:58:15 crc kubenswrapper[4813]: I0320 15:58:15.813843 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" event={"ID":"bfe1f060-6370-4060-8b43-4e6e95c1f018","Type":"ContainerDied","Data":"d3f34ab8aa9964dbec5da7b928dd5a3145bb06f6aeb3cbcbba942fd5a89b1ee1"} Mar 20 15:58:16 crc kubenswrapper[4813]: I0320 15:58:16.826330 4813 generic.go:334] "Generic (PLEG): container finished" 
podID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerID="ea54dbf62b91e45cf53d22cda7407504b114c587bca53380c39eb76cc917907b" exitCode=0 Mar 20 15:58:16 crc kubenswrapper[4813]: I0320 15:58:16.826423 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" event={"ID":"bfe1f060-6370-4060-8b43-4e6e95c1f018","Type":"ContainerDied","Data":"ea54dbf62b91e45cf53d22cda7407504b114c587bca53380c39eb76cc917907b"} Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.191774 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.250224 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr4hl\" (UniqueName: \"kubernetes.io/projected/bfe1f060-6370-4060-8b43-4e6e95c1f018-kube-api-access-jr4hl\") pod \"bfe1f060-6370-4060-8b43-4e6e95c1f018\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.250323 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-bundle\") pod \"bfe1f060-6370-4060-8b43-4e6e95c1f018\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.250380 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-util\") pod \"bfe1f060-6370-4060-8b43-4e6e95c1f018\" (UID: \"bfe1f060-6370-4060-8b43-4e6e95c1f018\") " Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.253140 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-bundle" (OuterVolumeSpecName: "bundle") pod "bfe1f060-6370-4060-8b43-4e6e95c1f018" (UID: "bfe1f060-6370-4060-8b43-4e6e95c1f018"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.258100 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfe1f060-6370-4060-8b43-4e6e95c1f018-kube-api-access-jr4hl" (OuterVolumeSpecName: "kube-api-access-jr4hl") pod "bfe1f060-6370-4060-8b43-4e6e95c1f018" (UID: "bfe1f060-6370-4060-8b43-4e6e95c1f018"). InnerVolumeSpecName "kube-api-access-jr4hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.283637 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-util" (OuterVolumeSpecName: "util") pod "bfe1f060-6370-4060-8b43-4e6e95c1f018" (UID: "bfe1f060-6370-4060-8b43-4e6e95c1f018"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.352281 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr4hl\" (UniqueName: \"kubernetes.io/projected/bfe1f060-6370-4060-8b43-4e6e95c1f018-kube-api-access-jr4hl\") on node \"crc\" DevicePath \"\"" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.352356 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.352396 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bfe1f060-6370-4060-8b43-4e6e95c1f018-util\") on node \"crc\" DevicePath \"\"" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.843159 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" event={"ID":"bfe1f060-6370-4060-8b43-4e6e95c1f018","Type":"ContainerDied","Data":"a9053909978f981c954784f45e9e405fb3c0b48b321c611ee8c4c7150f2b90b8"} Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.843197 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9053909978f981c954784f45e9e405fb3c0b48b321c611ee8c4c7150f2b90b8" Mar 20 15:58:18 crc kubenswrapper[4813]: I0320 15:58:18.843256 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.676984 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z"] Mar 20 15:58:24 crc kubenswrapper[4813]: E0320 15:58:24.677906 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="util" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.677921 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="util" Mar 20 15:58:24 crc kubenswrapper[4813]: E0320 15:58:24.677938 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="pull" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.677946 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="pull" Mar 20 15:58:24 crc kubenswrapper[4813]: E0320 15:58:24.677955 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="extract" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.677963 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="extract" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.678158 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfe1f060-6370-4060-8b43-4e6e95c1f018" containerName="extract" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.678797 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.689742 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-service-cert" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.696228 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-vsh7l" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.708886 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z"] Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.739702 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19a528ae-3ae2-48a5-88cb-04a92b073043-webhook-cert\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.739884 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rq4x\" (UniqueName: \"kubernetes.io/projected/19a528ae-3ae2-48a5-88cb-04a92b073043-kube-api-access-5rq4x\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.739910 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19a528ae-3ae2-48a5-88cb-04a92b073043-apiservice-cert\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.840321 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rq4x\" (UniqueName: \"kubernetes.io/projected/19a528ae-3ae2-48a5-88cb-04a92b073043-kube-api-access-5rq4x\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.840369 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19a528ae-3ae2-48a5-88cb-04a92b073043-apiservice-cert\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.840393 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19a528ae-3ae2-48a5-88cb-04a92b073043-webhook-cert\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.845661 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19a528ae-3ae2-48a5-88cb-04a92b073043-webhook-cert\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.847087 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19a528ae-3ae2-48a5-88cb-04a92b073043-apiservice-cert\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.855347 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rq4x\" (UniqueName: \"kubernetes.io/projected/19a528ae-3ae2-48a5-88cb-04a92b073043-kube-api-access-5rq4x\") pod \"watcher-operator-controller-manager-77c9f8cb5b-z626z\" (UID: \"19a528ae-3ae2-48a5-88cb-04a92b073043\") " pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:24 crc kubenswrapper[4813]: I0320 15:58:24.996855 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:25 crc kubenswrapper[4813]: W0320 15:58:25.554598 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19a528ae_3ae2_48a5_88cb_04a92b073043.slice/crio-27adb73f2d0344e646128a3e92073e646334dd564e06b07c28c1df7de34bb650 WatchSource:0}: Error finding container 27adb73f2d0344e646128a3e92073e646334dd564e06b07c28c1df7de34bb650: Status 404 returned error can't find the container with id 27adb73f2d0344e646128a3e92073e646334dd564e06b07c28c1df7de34bb650 Mar 20 15:58:25 crc kubenswrapper[4813]: I0320 15:58:25.566584 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z"] Mar 20 15:58:25 crc kubenswrapper[4813]: I0320 15:58:25.895820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" event={"ID":"19a528ae-3ae2-48a5-88cb-04a92b073043","Type":"ContainerStarted","Data":"d46551157c6503d450680d60c9d3d8d22046a14d06d1435a4ae5f796e9654e42"} Mar 20 15:58:25 crc kubenswrapper[4813]: I0320 15:58:25.896291 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:25 crc kubenswrapper[4813]: I0320 15:58:25.896313 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" event={"ID":"19a528ae-3ae2-48a5-88cb-04a92b073043","Type":"ContainerStarted","Data":"27adb73f2d0344e646128a3e92073e646334dd564e06b07c28c1df7de34bb650"} Mar 20 15:58:25 crc kubenswrapper[4813]: I0320 15:58:25.913593 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" podStartSLOduration=1.913577918 podStartE2EDuration="1.913577918s" podCreationTimestamp="2026-03-20 15:58:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2026-03-20 15:58:25.911239565 +0000 UTC m=+1235.333942406" watchObservedRunningTime="2026-03-20 15:58:25.913577918 +0000 UTC m=+1235.336280749" Mar 20 15:58:35 crc kubenswrapper[4813]: I0320 15:58:35.004376 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-77c9f8cb5b-z626z" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.590365 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/rabbitmq-server-0"] Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.592225 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.594418 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openshift-service-ca.crt" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.594785 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-plugins-conf" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.594844 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-erlang-cookie" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.595421 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-rabbitmq-svc" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.595501 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-server-conf" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.595521 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-config-data" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.595647 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-default-user" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.595727 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-server-dockercfg-6tmzk" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.604896 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"kube-root-ca.crt" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.606871 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-server-0"] Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734055 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734103 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734157 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-config-data\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734224 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734253 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734271 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734295 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrg7k\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-kube-api-access-lrg7k\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734311 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734329 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734370 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.734392 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835232 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835291 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835336 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-config-data\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835400 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835448 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835471 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrg7k\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-kube-api-access-lrg7k\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835552 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835574 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835615 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.835640 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.836640 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.837021 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.837521 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.837713 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.838233 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-config-data\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.839317 4813 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.839403 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/630038598fab6f2788b3ef322828cebca93d9080280e9634cafc1b9f341187a8/globalmount\"" pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.842308 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.845239 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.847997 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.848873 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.859133 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrg7k\" (UniqueName: \"kubernetes.io/projected/ec1149a7-ab74-4cc6-9e54-66a6136d41ac-kube-api-access-lrg7k\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.888045 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5abfda44-f314-465e-91cf-aee1f5b26db6\") pod \"rabbitmq-server-0\" (UID: \"ec1149a7-ab74-4cc6-9e54-66a6136d41ac\") " pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:46 crc kubenswrapper[4813]: I0320 15:58:46.918843 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.182650 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/rabbitmq-notifications-server-0"] Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.185140 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.188719 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-notifications-server-dockercfg-rphzc" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.189010 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-notifications-erlang-cookie" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.189159 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-notifications-plugins-conf" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.189331 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-notifications-config-data" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.189499 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-rabbitmq-notifications-svc" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.189647 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-notifications-server-conf" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.189802 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-notifications-default-user" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.199592 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-notifications-server-0"] Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.259816 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.259874 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.259898 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.259938 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0f7bff9e-8c2a-478e-a30e-55d5be1df762-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.259983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/0f7bff9e-8c2a-478e-a30e-55d5be1df762-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.260006 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.260034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.260061 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.260091 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.260120 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.260139 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjqv8\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-kube-api-access-tjqv8\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360649 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: 
\"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360721 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360737 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0f7bff9e-8c2a-478e-a30e-55d5be1df762-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360764 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0f7bff9e-8c2a-478e-a30e-55d5be1df762-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360792 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360823 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360841 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360875 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360896 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.360909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-tjqv8\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-kube-api-access-tjqv8\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.362022 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.362988 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.364076 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.364370 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.367381 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0f7bff9e-8c2a-478e-a30e-55d5be1df762-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.367442 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.368259 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0f7bff9e-8c2a-478e-a30e-55d5be1df762-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.386767 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.390193 4813 
csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.390237 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8b107d6caf633cf652ace4c09a4f613a01cb74da73d27f9ab068610ff0bbca23/globalmount\"" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.392213 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0f7bff9e-8c2a-478e-a30e-55d5be1df762-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.404893 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjqv8\" (UniqueName: \"kubernetes.io/projected/0f7bff9e-8c2a-478e-a30e-55d5be1df762-kube-api-access-tjqv8\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.414307 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-server-0"] Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.443301 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5dd7044e-d944-431d-9bfa-01e3c571e7a2\") pod \"rabbitmq-notifications-server-0\" (UID: \"0f7bff9e-8c2a-478e-a30e-55d5be1df762\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:47 crc kubenswrapper[4813]: I0320 15:58:47.535863 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.049988 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-notifications-server-0"] Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.053900 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"0f7bff9e-8c2a-478e-a30e-55d5be1df762","Type":"ContainerStarted","Data":"2599d5279d363e1809698266f5db6b3bbb7b72e892e01e3379939ce605f4f981"} Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.055785 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"ec1149a7-ab74-4cc6-9e54-66a6136d41ac","Type":"ContainerStarted","Data":"f89ef482b89e5cfa45531a8b3d2a5221ae436a96be04cf362a48a454a40c1616"} Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.769119 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/openstack-galera-0"] Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.771021 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.777184 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"galera-openstack-dockercfg-crdfg" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.778505 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-galera-openstack-svc" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.779281 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openstack-scripts" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.779446 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openstack-config-data" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.781422 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstack-galera-0"] Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.785031 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"combined-ca-bundle" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.879802 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3367f79a-2519-4c2d-8563-fac811678ed3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.879866 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.879894 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-config-data-default\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.879917 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-kolla-config\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.879955 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.879987 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgbmb\" (UniqueName: \"kubernetes.io/projected/3367f79a-2519-4c2d-8563-fac811678ed3-kube-api-access-xgbmb\") pod \"openstack-galera-0\" (UID: 
\"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.880007 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3367f79a-2519-4c2d-8563-fac811678ed3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.880069 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3367f79a-2519-4c2d-8563-fac811678ed3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.980913 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-config-data-default\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.980964 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-kolla-config\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981005 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981038 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgbmb\" (UniqueName: \"kubernetes.io/projected/3367f79a-2519-4c2d-8563-fac811678ed3-kube-api-access-xgbmb\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981060 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3367f79a-2519-4c2d-8563-fac811678ed3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981088 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3367f79a-2519-4c2d-8563-fac811678ed3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981124 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3367f79a-2519-4c2d-8563-fac811678ed3-combined-ca-bundle\") pod 
\"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981147 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.981716 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3367f79a-2519-4c2d-8563-fac811678ed3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.982406 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.983129 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-kolla-config\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.983338 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3367f79a-2519-4c2d-8563-fac811678ed3-config-data-default\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.986035 4813 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.986083 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/834c223aae9f468dfe9db00312027e468e39b8e5e28a458038a21ef97f21f8c1/globalmount\"" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:48 crc kubenswrapper[4813]: I0320 15:58:48.987292 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3367f79a-2519-4c2d-8563-fac811678ed3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.000464 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3367f79a-2519-4c2d-8563-fac811678ed3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.003308 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgbmb\" (UniqueName: \"kubernetes.io/projected/3367f79a-2519-4c2d-8563-fac811678ed3-kube-api-access-xgbmb\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.038292 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0602b07b-d2e0-4ed7-8f4e-be1f97cdd78c\") pod \"openstack-galera-0\" (UID: \"3367f79a-2519-4c2d-8563-fac811678ed3\") " pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.137154 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.138276 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.141127 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-memcached-svc" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.141127 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"memcached-config-data" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.142139 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"memcached-memcached-dockercfg-g4cpw" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.149136 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.155610 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.297477 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-config-data\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.297908 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-memcached-tls-certs\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.297931 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-kolla-config\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.297973 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhfkf\" (UniqueName: \"kubernetes.io/projected/925d9de3-2778-411a-8e7a-2af03ebc8439-kube-api-access-bhfkf\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.298019 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-combined-ca-bundle\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.394322 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.395573 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.399346 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"telemetry-ceilometer-dockercfg-q4p6n" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.400270 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-combined-ca-bundle\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.400327 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-config-data\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.400409 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-memcached-tls-certs\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.400559 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-kolla-config\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.401366 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-config-data\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.402079 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-kolla-config\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.403772 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhfkf\" (UniqueName: \"kubernetes.io/projected/925d9de3-2778-411a-8e7a-2af03ebc8439-kube-api-access-bhfkf\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.417420 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-memcached-tls-certs\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.420009 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.426221 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-combined-ca-bundle\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.437746 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhfkf\" (UniqueName: \"kubernetes.io/projected/925d9de3-2778-411a-8e7a-2af03ebc8439-kube-api-access-bhfkf\") pod \"memcached-0\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.464931 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/memcached-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.505929 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdm6n\" (UniqueName: \"kubernetes.io/projected/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca-kube-api-access-jdm6n\") pod \"kube-state-metrics-0\" (UID: \"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.606891 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdm6n\" (UniqueName: \"kubernetes.io/projected/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca-kube-api-access-jdm6n\") pod \"kube-state-metrics-0\" (UID: \"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.628753 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdm6n\" (UniqueName: \"kubernetes.io/projected/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca-kube-api-access-jdm6n\") pod \"kube-state-metrics-0\" (UID: \"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.765206 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstack-galera-0"] Mar 20 15:58:49 crc kubenswrapper[4813]: I0320 15:58:49.765215 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.083913 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.097820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"3367f79a-2519-4c2d-8563-fac811678ed3","Type":"ContainerStarted","Data":"141447c55b98dd0d688202c6fde537a9970f0ad3a954d023a45938761ae5d678"} Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.102725 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/alertmanager-metric-storage-0"] Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.104808 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.124767 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-tls-assets-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.125023 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-generated" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.124774 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-alertmanager-dockercfg-pkckz" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.125167 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-web-config" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.125333 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-cluster-tls-config" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.144923 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/alertmanager-metric-storage-0"] Mar 20 15:58:50 crc kubenswrapper[4813]: W0320 15:58:50.193548 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod925d9de3_2778_411a_8e7a_2af03ebc8439.slice/crio-95555513352c667376fcfa440a039d570a50c9975f69b08375b267006414ed7a WatchSource:0}: Error finding container 95555513352c667376fcfa440a039d570a50c9975f69b08375b267006414ed7a: Status 404 returned error can't find the container with id 95555513352c667376fcfa440a039d570a50c9975f69b08375b267006414ed7a Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221242 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6dcc5c20-eb16-44c2-be60-17a397527235-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221292 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221358 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6dcc5c20-eb16-44c2-be60-17a397527235-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221382 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221431 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221465 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpdfd\" (UniqueName: \"kubernetes.io/projected/6dcc5c20-eb16-44c2-be60-17a397527235-kube-api-access-tpdfd\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.221498 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6dcc5c20-eb16-44c2-be60-17a397527235-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322771 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322827 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpdfd\" (UniqueName: \"kubernetes.io/projected/6dcc5c20-eb16-44c2-be60-17a397527235-kube-api-access-tpdfd\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322853 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6dcc5c20-eb16-44c2-be60-17a397527235-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322882 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6dcc5c20-eb16-44c2-be60-17a397527235-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322906 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322937 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6dcc5c20-eb16-44c2-be60-17a397527235-config-out\") pod 
\"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.322960 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.324007 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6dcc5c20-eb16-44c2-be60-17a397527235-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.329818 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6dcc5c20-eb16-44c2-be60-17a397527235-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.332066 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6dcc5c20-eb16-44c2-be60-17a397527235-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.333343 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.333877 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.336160 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/6dcc5c20-eb16-44c2-be60-17a397527235-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.352270 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpdfd\" (UniqueName: \"kubernetes.io/projected/6dcc5c20-eb16-44c2-be60-17a397527235-kube-api-access-tpdfd\") pod \"alertmanager-metric-storage-0\" (UID: \"6dcc5c20-eb16-44c2-be60-17a397527235\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.459792 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.478083 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 15:58:50 crc kubenswrapper[4813]: W0320 15:58:50.506881 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff4d7e5_cf19_41e9_b489_3361f7eb2fca.slice/crio-4fcffeb23877fd7f6e560152a370a94b2df681834c220843f8ea9ac5c4761172 WatchSource:0}: Error finding container 4fcffeb23877fd7f6e560152a370a94b2df681834c220843f8ea9ac5c4761172: Status 404 returned error can't find the container with id 4fcffeb23877fd7f6e560152a370a94b2df681834c220843f8ea9ac5c4761172 Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.643067 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn"] Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.647104 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.651754 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.652190 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards-sa-dockercfg-rbv6q" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.673039 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn"] Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.844185 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c22deaf-5054-462f-bc80-f21fedd71c41-serving-cert\") pod \"observability-ui-dashboards-7f87b9b85b-6t9mn\" (UID: \"6c22deaf-5054-462f-bc80-f21fedd71c41\") " pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.844275 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9jff\" (UniqueName: \"kubernetes.io/projected/6c22deaf-5054-462f-bc80-f21fedd71c41-kube-api-access-d9jff\") pod \"observability-ui-dashboards-7f87b9b85b-6t9mn\" (UID: \"6c22deaf-5054-462f-bc80-f21fedd71c41\") " pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.946352 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c22deaf-5054-462f-bc80-f21fedd71c41-serving-cert\") pod \"observability-ui-dashboards-7f87b9b85b-6t9mn\" (UID: \"6c22deaf-5054-462f-bc80-f21fedd71c41\") " pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.946434 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9jff\" (UniqueName: \"kubernetes.io/projected/6c22deaf-5054-462f-bc80-f21fedd71c41-kube-api-access-d9jff\") pod \"observability-ui-dashboards-7f87b9b85b-6t9mn\" (UID: \"6c22deaf-5054-462f-bc80-f21fedd71c41\") " pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:50 
crc kubenswrapper[4813]: I0320 15:58:50.947982 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7b878968-6wrvq"] Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.948965 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.952397 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c22deaf-5054-462f-bc80-f21fedd71c41-serving-cert\") pod \"observability-ui-dashboards-7f87b9b85b-6t9mn\" (UID: \"6c22deaf-5054-462f-bc80-f21fedd71c41\") " pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.985295 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b878968-6wrvq"] Mar 20 15:58:50 crc kubenswrapper[4813]: I0320 15:58:50.995450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9jff\" (UniqueName: \"kubernetes.io/projected/6c22deaf-5054-462f-bc80-f21fedd71c41-kube-api-access-d9jff\") pod \"observability-ui-dashboards-7f87b9b85b-6t9mn\" (UID: \"6c22deaf-5054-462f-bc80-f21fedd71c41\") " pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.049357 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-serving-cert\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.049690 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-trusted-ca-bundle\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.049728 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-oauth-serving-cert\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.049779 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bw5c\" (UniqueName: \"kubernetes.io/projected/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-kube-api-access-8bw5c\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.049920 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-oauth-config\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.049962 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-config\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.050012 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-service-ca\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.116547 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca","Type":"ContainerStarted","Data":"4fcffeb23877fd7f6e560152a370a94b2df681834c220843f8ea9ac5c4761172"} Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.118011 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"925d9de3-2778-411a-8e7a-2af03ebc8439","Type":"ContainerStarted","Data":"95555513352c667376fcfa440a039d570a50c9975f69b08375b267006414ed7a"} Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.135359 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/alertmanager-metric-storage-0"] Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.153749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-serving-cert\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.153872 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-trusted-ca-bundle\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.153896 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-oauth-serving-cert\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.153921 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bw5c\" (UniqueName: \"kubernetes.io/projected/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-kube-api-access-8bw5c\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.153948 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-oauth-config\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.153978 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-config\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.154015 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-service-ca\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.155696 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-service-ca\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.156166 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-trusted-ca-bundle\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.156413 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-config\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.158423 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-oauth-serving-cert\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.164175 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-serving-cert\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.165907 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-console-oauth-config\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.185540 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bw5c\" (UniqueName: \"kubernetes.io/projected/e349ac25-222d-45bd-80c6-d2a7f74bbbdc-kube-api-access-8bw5c\") pod \"console-7b878968-6wrvq\" (UID: \"e349ac25-222d-45bd-80c6-d2a7f74bbbdc\") " pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.189403 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 
15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.191347 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.195345 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-web-config" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.195760 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-tls-assets-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.195910 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.197386 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.197526 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-1" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.197778 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-prometheus-dockercfg-8v6vp" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.198596 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.198813 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-2" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.225113 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.294013 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.357960 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369043 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369103 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369158 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx829\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-kube-api-access-lx829\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369242 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369300 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369339 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369376 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369405 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: 
\"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369434 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.369454 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.470868 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.470930 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.470970 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.470997 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471024 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471601 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471641 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx829\" (UniqueName: 
\"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-kube-api-access-lx829\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471713 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471825 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471857 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.471718 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.472277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.474287 4813 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.474320 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/504962cc396d8162f5a974b64cf1cdd7337fdfdd024497e20b3aeb0802e6d7eb/globalmount\"" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.475026 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.475450 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-web-config" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.476025 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.476671 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.478861 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.482433 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-tls-assets-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.485755 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.486637 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.492473 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.501379 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" 
(UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.501427 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx829\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-kube-api-access-lx829\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.538739 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.568261 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-prometheus-dockercfg-8v6vp" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.576816 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:58:51 crc kubenswrapper[4813]: I0320 15:58:51.969408 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b878968-6wrvq"] Mar 20 15:58:51 crc kubenswrapper[4813]: W0320 15:58:51.970861 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode349ac25_222d_45bd_80c6_d2a7f74bbbdc.slice/crio-24d0b9550fed781e4d8bd716d7ceb1f9cc98698b95d7e7093f3b92d9b22ed783 WatchSource:0}: Error finding container 24d0b9550fed781e4d8bd716d7ceb1f9cc98698b95d7e7093f3b92d9b22ed783: Status 404 returned error can't find the container with id 24d0b9550fed781e4d8bd716d7ceb1f9cc98698b95d7e7093f3b92d9b22ed783 Mar 20 15:58:52 crc kubenswrapper[4813]: I0320 15:58:52.040517 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn"] Mar 20 15:58:52 crc kubenswrapper[4813]: W0320 15:58:52.047393 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c22deaf_5054_462f_bc80_f21fedd71c41.slice/crio-d50c97b11fc2e80b75bc30a00c7a6372e36f7ba7b5a8d9ff751a0b5c70b1775b WatchSource:0}: Error finding container d50c97b11fc2e80b75bc30a00c7a6372e36f7ba7b5a8d9ff751a0b5c70b1775b: Status 404 returned error can't find the container with id d50c97b11fc2e80b75bc30a00c7a6372e36f7ba7b5a8d9ff751a0b5c70b1775b Mar 20 15:58:52 crc kubenswrapper[4813]: I0320 15:58:52.127383 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" event={"ID":"6c22deaf-5054-462f-bc80-f21fedd71c41","Type":"ContainerStarted","Data":"d50c97b11fc2e80b75bc30a00c7a6372e36f7ba7b5a8d9ff751a0b5c70b1775b"} Mar 20 15:58:52 crc kubenswrapper[4813]: I0320 15:58:52.129244 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" 
event={"ID":"6dcc5c20-eb16-44c2-be60-17a397527235","Type":"ContainerStarted","Data":"37cb851a503dba0b3eebcc146f7e903706c3d349d5083b264c376d0bb53bd59e"} Mar 20 15:58:52 crc kubenswrapper[4813]: I0320 15:58:52.130261 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b878968-6wrvq" event={"ID":"e349ac25-222d-45bd-80c6-d2a7f74bbbdc","Type":"ContainerStarted","Data":"24d0b9550fed781e4d8bd716d7ceb1f9cc98698b95d7e7093f3b92d9b22ed783"} Mar 20 15:58:52 crc kubenswrapper[4813]: I0320 15:58:52.178911 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:58:52 crc kubenswrapper[4813]: W0320 15:58:52.190777 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6328bb8_f6a3_476c_a58c_d9beec93b7ff.slice/crio-f277d5bbe5949676cb3413c885b785c61d42215f3d7e69fb0907158982a2d3e2 WatchSource:0}: Error finding container f277d5bbe5949676cb3413c885b785c61d42215f3d7e69fb0907158982a2d3e2: Status 404 returned error can't find the container with id f277d5bbe5949676cb3413c885b785c61d42215f3d7e69fb0907158982a2d3e2 Mar 20 15:58:53 crc kubenswrapper[4813]: I0320 15:58:53.149718 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerStarted","Data":"f277d5bbe5949676cb3413c885b785c61d42215f3d7e69fb0907158982a2d3e2"} Mar 20 15:58:53 crc kubenswrapper[4813]: I0320 15:58:53.158746 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b878968-6wrvq" event={"ID":"e349ac25-222d-45bd-80c6-d2a7f74bbbdc","Type":"ContainerStarted","Data":"cabd0ce87ba21f26ee39465c687ed6a6da6bcde781e114bc9fa92cf1f026147b"} Mar 20 15:58:53 crc kubenswrapper[4813]: I0320 15:58:53.198422 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7b878968-6wrvq" podStartSLOduration=3.198402697 podStartE2EDuration="3.198402697s" podCreationTimestamp="2026-03-20 15:58:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:58:53.19336023 +0000 UTC m=+1262.616063071" watchObservedRunningTime="2026-03-20 15:58:53.198402697 +0000 UTC m=+1262.621105538" Mar 20 15:59:01 crc kubenswrapper[4813]: I0320 15:59:01.368459 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:59:01 crc kubenswrapper[4813]: I0320 15:59:01.369054 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:59:01 crc kubenswrapper[4813]: I0320 15:59:01.376784 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:59:02 crc kubenswrapper[4813]: I0320 15:59:02.247100 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7b878968-6wrvq" Mar 20 15:59:02 crc kubenswrapper[4813]: I0320 15:59:02.300607 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-cb4f878bb-p9q64"] Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.146850 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.147039 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tjqv8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-notifications-server-0_watcher-kuttl-default(0f7bff9e-8c2a-478e-a30e-55d5be1df762): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.148228 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" podUID="0f7bff9e-8c2a-478e-a30e-55d5be1df762" Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.250844 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" podUID="0f7bff9e-8c2a-478e-a30e-55d5be1df762" Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.434848 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.435298 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lrg7k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_watcher-kuttl-default(ec1149a7-ab74-4cc6-9e54-66a6136d41ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 15:59:03 crc kubenswrapper[4813]: E0320 15:59:03.436508 4813 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="watcher-kuttl-default/rabbitmq-server-0" podUID="ec1149a7-ab74-4cc6-9e54-66a6136d41ac" Mar 20 15:59:04 crc kubenswrapper[4813]: E0320 15:59:04.256901 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="watcher-kuttl-default/rabbitmq-server-0" podUID="ec1149a7-ab74-4cc6-9e54-66a6136d41ac" Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.314415 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"3367f79a-2519-4c2d-8563-fac811678ed3","Type":"ContainerStarted","Data":"d0ab8b9bda9494fecbba3ef3da7297626782c9a573c6f696996a92c91731d768"} Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.316635 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"925d9de3-2778-411a-8e7a-2af03ebc8439","Type":"ContainerStarted","Data":"a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d"} Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.316800 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/memcached-0" Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.318841 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" event={"ID":"6c22deaf-5054-462f-bc80-f21fedd71c41","Type":"ContainerStarted","Data":"8f6eff3264f5e9b61fbeab57239888ac0198267a4e09b259dd620eda0d39c6da"} Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.320527 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca","Type":"ContainerStarted","Data":"b61a1e59b9e371f7ede1a7023b2ac8fcf40bbd9cb139d43ee110280b0562e550"} Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.320725 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.361847 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/kube-state-metrics-0" podStartSLOduration=2.357091304 podStartE2EDuration="21.361829047s" podCreationTimestamp="2026-03-20 15:58:49 +0000 UTC" firstStartedPulling="2026-03-20 15:58:50.510972139 +0000 UTC m=+1259.933674980" lastFinishedPulling="2026-03-20 15:59:09.515709872 +0000 UTC m=+1278.938412723" observedRunningTime="2026-03-20 15:59:10.355391633 +0000 UTC m=+1279.778094474" watchObservedRunningTime="2026-03-20 15:59:10.361829047 +0000 UTC m=+1279.784531888" Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.387640 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/memcached-0" podStartSLOduration=3.320401947 podStartE2EDuration="21.387625706s" podCreationTimestamp="2026-03-20 15:58:49 +0000 UTC" firstStartedPulling="2026-03-20 15:58:50.260729906 +0000 UTC m=+1259.683432747" lastFinishedPulling="2026-03-20 15:59:08.327953665 +0000 UTC m=+1277.750656506" observedRunningTime="2026-03-20 15:59:10.38406202 +0000 UTC m=+1279.806764881" watchObservedRunningTime="2026-03-20 15:59:10.387625706 
+0000 UTC m=+1279.810328547" Mar 20 15:59:10 crc kubenswrapper[4813]: I0320 15:59:10.399830 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-ui-dashboards-7f87b9b85b-6t9mn" podStartSLOduration=4.236453775 podStartE2EDuration="20.399813097s" podCreationTimestamp="2026-03-20 15:58:50 +0000 UTC" firstStartedPulling="2026-03-20 15:58:52.050236053 +0000 UTC m=+1261.472938894" lastFinishedPulling="2026-03-20 15:59:08.213595375 +0000 UTC m=+1277.636298216" observedRunningTime="2026-03-20 15:59:10.397662498 +0000 UTC m=+1279.820365349" watchObservedRunningTime="2026-03-20 15:59:10.399813097 +0000 UTC m=+1279.822515938" Mar 20 15:59:12 crc kubenswrapper[4813]: I0320 15:59:12.337270 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerStarted","Data":"2cbb755d773fdf2443169a18bd151cf3e5bf2de6974deb2136e6980ba8c2b374"} Mar 20 15:59:12 crc kubenswrapper[4813]: I0320 15:59:12.338521 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"6dcc5c20-eb16-44c2-be60-17a397527235","Type":"ContainerStarted","Data":"f91a40b318ca8c66345c89c1247312d2fd4de6d3ff58b29a5e7382c80aaae572"} Mar 20 15:59:13 crc kubenswrapper[4813]: I0320 15:59:13.346143 4813 generic.go:334] "Generic (PLEG): container finished" podID="3367f79a-2519-4c2d-8563-fac811678ed3" containerID="d0ab8b9bda9494fecbba3ef3da7297626782c9a573c6f696996a92c91731d768" exitCode=0 Mar 20 15:59:13 crc kubenswrapper[4813]: I0320 15:59:13.346231 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"3367f79a-2519-4c2d-8563-fac811678ed3","Type":"ContainerDied","Data":"d0ab8b9bda9494fecbba3ef3da7297626782c9a573c6f696996a92c91731d768"} Mar 20 15:59:14 crc kubenswrapper[4813]: I0320 15:59:14.358326 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"3367f79a-2519-4c2d-8563-fac811678ed3","Type":"ContainerStarted","Data":"89e99ab0ab8954f5cd0b626a60e0f0b51cb5f0c60236658a9ee01af7b3f340b3"} Mar 20 15:59:14 crc kubenswrapper[4813]: I0320 15:59:14.387330 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/openstack-galera-0" podStartSLOduration=8.590439028 podStartE2EDuration="27.387307356s" podCreationTimestamp="2026-03-20 15:58:47 +0000 UTC" firstStartedPulling="2026-03-20 15:58:49.797948231 +0000 UTC m=+1259.220651082" lastFinishedPulling="2026-03-20 15:59:08.594816569 +0000 UTC m=+1278.017519410" observedRunningTime="2026-03-20 15:59:14.377712286 +0000 UTC m=+1283.800415137" watchObservedRunningTime="2026-03-20 15:59:14.387307356 +0000 UTC m=+1283.810010207" Mar 20 15:59:14 crc kubenswrapper[4813]: I0320 15:59:14.466954 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/memcached-0" Mar 20 15:59:17 crc kubenswrapper[4813]: I0320 15:59:17.380634 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"0f7bff9e-8c2a-478e-a30e-55d5be1df762","Type":"ContainerStarted","Data":"38ea0653d9a0d0d96f21c44cd996c4ce74d06c876744ddd7325dd462bc0ec30a"} Mar 20 15:59:17 crc kubenswrapper[4813]: I0320 15:59:17.383651 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" 
event={"ID":"ec1149a7-ab74-4cc6-9e54-66a6136d41ac","Type":"ContainerStarted","Data":"ba6e108fef263839b5a13a586ea3ceb79329926512c3eea6a5fc690ba681553f"} Mar 20 15:59:18 crc kubenswrapper[4813]: E0320 15:59:18.184232 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6328bb8_f6a3_476c_a58c_d9beec93b7ff.slice/crio-conmon-2cbb755d773fdf2443169a18bd151cf3e5bf2de6974deb2136e6980ba8c2b374.scope\": RecentStats: unable to find data in memory cache]" Mar 20 15:59:18 crc kubenswrapper[4813]: I0320 15:59:18.400742 4813 generic.go:334] "Generic (PLEG): container finished" podID="6dcc5c20-eb16-44c2-be60-17a397527235" containerID="f91a40b318ca8c66345c89c1247312d2fd4de6d3ff58b29a5e7382c80aaae572" exitCode=0 Mar 20 15:59:18 crc kubenswrapper[4813]: I0320 15:59:18.400828 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"6dcc5c20-eb16-44c2-be60-17a397527235","Type":"ContainerDied","Data":"f91a40b318ca8c66345c89c1247312d2fd4de6d3ff58b29a5e7382c80aaae572"} Mar 20 15:59:18 crc kubenswrapper[4813]: I0320 15:59:18.405854 4813 generic.go:334] "Generic (PLEG): container finished" podID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerID="2cbb755d773fdf2443169a18bd151cf3e5bf2de6974deb2136e6980ba8c2b374" exitCode=0 Mar 20 15:59:18 crc kubenswrapper[4813]: I0320 15:59:18.405918 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerDied","Data":"2cbb755d773fdf2443169a18bd151cf3e5bf2de6974deb2136e6980ba8c2b374"} Mar 20 15:59:19 crc kubenswrapper[4813]: I0320 15:59:19.149581 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:59:19 crc kubenswrapper[4813]: I0320 15:59:19.150002 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:59:19 crc kubenswrapper[4813]: I0320 15:59:19.244225 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:59:19 crc kubenswrapper[4813]: I0320 15:59:19.523572 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/openstack-galera-0" Mar 20 15:59:19 crc kubenswrapper[4813]: I0320 15:59:19.771044 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 15:59:21 crc kubenswrapper[4813]: I0320 15:59:21.442890 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"6dcc5c20-eb16-44c2-be60-17a397527235","Type":"ContainerStarted","Data":"631f6cc928c6e7ce300b61333df186c4e7218bdebd10d4ccdf8c9ea21992d1e6"} Mar 20 15:59:23 crc kubenswrapper[4813]: I0320 15:59:23.463103 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"6dcc5c20-eb16-44c2-be60-17a397527235","Type":"ContainerStarted","Data":"438b4eb22c7e0ab355e3c7ae55d9489469b841d09922fe64ee9ed26c7a57f4a5"} Mar 20 15:59:23 crc kubenswrapper[4813]: I0320 15:59:23.463418 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:59:24 crc kubenswrapper[4813]: I0320 
15:59:24.471863 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Mar 20 15:59:24 crc kubenswrapper[4813]: I0320 15:59:24.501582 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/alertmanager-metric-storage-0" podStartSLOduration=4.887641838 podStartE2EDuration="34.501564633s" podCreationTimestamp="2026-03-20 15:58:50 +0000 UTC" firstStartedPulling="2026-03-20 15:58:51.173019495 +0000 UTC m=+1260.595722336" lastFinishedPulling="2026-03-20 15:59:20.78694229 +0000 UTC m=+1290.209645131" observedRunningTime="2026-03-20 15:59:23.490225289 +0000 UTC m=+1292.912928130" watchObservedRunningTime="2026-03-20 15:59:24.501564633 +0000 UTC m=+1293.924267474" Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.341939 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-cb4f878bb-p9q64" podUID="04185fdf-5c0c-46c8-8447-bb3225c8409b" containerName="console" containerID="cri-o://9086b30e50e46c8e5a3e4999f23138fbee4200a6b043bb62e692f238e2dc4ad5" gracePeriod=15 Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.496991 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-cb4f878bb-p9q64_04185fdf-5c0c-46c8-8447-bb3225c8409b/console/0.log" Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.497075 4813 generic.go:334] "Generic (PLEG): container finished" podID="04185fdf-5c0c-46c8-8447-bb3225c8409b" containerID="9086b30e50e46c8e5a3e4999f23138fbee4200a6b043bb62e692f238e2dc4ad5" exitCode=2 Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.497115 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-cb4f878bb-p9q64" event={"ID":"04185fdf-5c0c-46c8-8447-bb3225c8409b","Type":"ContainerDied","Data":"9086b30e50e46c8e5a3e4999f23138fbee4200a6b043bb62e692f238e2dc4ad5"} Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.820133 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/root-account-create-update-ddxhk"] Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.821156 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.822841 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"openstack-mariadb-root-db-secret" Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.830773 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/root-account-create-update-ddxhk"] Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.951599 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gljql\" (UniqueName: \"kubernetes.io/projected/39739c2c-937f-4b31-b6f7-9f04c13411c8-kube-api-access-gljql\") pod \"root-account-create-update-ddxhk\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:27 crc kubenswrapper[4813]: I0320 15:59:27.951770 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39739c2c-937f-4b31-b6f7-9f04c13411c8-operator-scripts\") pod \"root-account-create-update-ddxhk\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:28 crc kubenswrapper[4813]: I0320 15:59:28.052848 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gljql\" (UniqueName: \"kubernetes.io/projected/39739c2c-937f-4b31-b6f7-9f04c13411c8-kube-api-access-gljql\") pod \"root-account-create-update-ddxhk\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:28 crc kubenswrapper[4813]: I0320 15:59:28.052936 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39739c2c-937f-4b31-b6f7-9f04c13411c8-operator-scripts\") pod \"root-account-create-update-ddxhk\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:28 crc kubenswrapper[4813]: I0320 15:59:28.053702 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39739c2c-937f-4b31-b6f7-9f04c13411c8-operator-scripts\") pod \"root-account-create-update-ddxhk\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:28 crc kubenswrapper[4813]: I0320 15:59:28.080142 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gljql\" (UniqueName: \"kubernetes.io/projected/39739c2c-937f-4b31-b6f7-9f04c13411c8-kube-api-access-gljql\") pod \"root-account-create-update-ddxhk\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:28 crc kubenswrapper[4813]: I0320 15:59:28.138830 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.057119 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-db-create-gsq8f"] Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.058702 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.067834 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93895b7a-2be8-44b3-92d0-ad24c832633b-operator-scripts\") pod \"keystone-db-create-gsq8f\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.067983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rqvk\" (UniqueName: \"kubernetes.io/projected/93895b7a-2be8-44b3-92d0-ad24c832633b-kube-api-access-8rqvk\") pod \"keystone-db-create-gsq8f\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.122686 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-create-gsq8f"] Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.169406 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rqvk\" (UniqueName: \"kubernetes.io/projected/93895b7a-2be8-44b3-92d0-ad24c832633b-kube-api-access-8rqvk\") pod \"keystone-db-create-gsq8f\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.169559 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93895b7a-2be8-44b3-92d0-ad24c832633b-operator-scripts\") pod \"keystone-db-create-gsq8f\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.170363 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93895b7a-2be8-44b3-92d0-ad24c832633b-operator-scripts\") pod \"keystone-db-create-gsq8f\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.185190 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-e994-account-create-update-45jsg"] Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.186774 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.189844 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-db-secret" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.202736 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-e994-account-create-update-45jsg"] Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.206189 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rqvk\" (UniqueName: \"kubernetes.io/projected/93895b7a-2be8-44b3-92d0-ad24c832633b-kube-api-access-8rqvk\") pod \"keystone-db-create-gsq8f\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.372664 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec418c85-33d1-4b27-9b14-bfb444247571-operator-scripts\") pod \"keystone-e994-account-create-update-45jsg\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.372760 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6rkr\" (UniqueName: \"kubernetes.io/projected/ec418c85-33d1-4b27-9b14-bfb444247571-kube-api-access-j6rkr\") pod \"keystone-e994-account-create-update-45jsg\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.382937 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.473941 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6rkr\" (UniqueName: \"kubernetes.io/projected/ec418c85-33d1-4b27-9b14-bfb444247571-kube-api-access-j6rkr\") pod \"keystone-e994-account-create-update-45jsg\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.474272 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec418c85-33d1-4b27-9b14-bfb444247571-operator-scripts\") pod \"keystone-e994-account-create-update-45jsg\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.475036 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec418c85-33d1-4b27-9b14-bfb444247571-operator-scripts\") pod \"keystone-e994-account-create-update-45jsg\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.491669 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6rkr\" (UniqueName: \"kubernetes.io/projected/ec418c85-33d1-4b27-9b14-bfb444247571-kube-api-access-j6rkr\") pod \"keystone-e994-account-create-update-45jsg\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.549109 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.953997 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-create-gsq8f"] Mar 20 15:59:29 crc kubenswrapper[4813]: W0320 15:59:29.962551 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93895b7a_2be8_44b3_92d0_ad24c832633b.slice/crio-23a66f029d41865bf423681f3c9f53de62af4bfcdb6691b85656fc5046503abb WatchSource:0}: Error finding container 23a66f029d41865bf423681f3c9f53de62af4bfcdb6691b85656fc5046503abb: Status 404 returned error can't find the container with id 23a66f029d41865bf423681f3c9f53de62af4bfcdb6691b85656fc5046503abb Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.976053 4813 patch_prober.go:28] interesting pod/console-cb4f878bb-p9q64 container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.48:8443/health\": dial tcp 10.217.0.48:8443: connect: connection refused" start-of-body= Mar 20 15:59:29 crc kubenswrapper[4813]: I0320 15:59:29.976138 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-cb4f878bb-p9q64" podUID="04185fdf-5c0c-46c8-8447-bb3225c8409b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.48:8443/health\": dial tcp 10.217.0.48:8443: connect: connection refused" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.051254 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/root-account-create-update-ddxhk"] Mar 20 15:59:30 crc kubenswrapper[4813]: W0320 15:59:30.083459 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39739c2c_937f_4b31_b6f7_9f04c13411c8.slice/crio-2c0e56dbc393ec3478f87e165198caadd311b140ad41e2ab34d9e4a014e66f55 WatchSource:0}: Error finding container 2c0e56dbc393ec3478f87e165198caadd311b140ad41e2ab34d9e4a014e66f55: Status 404 returned error can't find the container with id 2c0e56dbc393ec3478f87e165198caadd311b140ad41e2ab34d9e4a014e66f55 Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.114797 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-e994-account-create-update-45jsg"] Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.323250 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-cb4f878bb-p9q64_04185fdf-5c0c-46c8-8447-bb3225c8409b/console/0.log" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.323576 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488207 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-config\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488303 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-oauth-config\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488344 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-serving-cert\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488408 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-oauth-serving-cert\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488426 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-service-ca\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488450 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-trusted-ca-bundle\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.488472 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbxdl\" (UniqueName: \"kubernetes.io/projected/04185fdf-5c0c-46c8-8447-bb3225c8409b-kube-api-access-wbxdl\") pod \"04185fdf-5c0c-46c8-8447-bb3225c8409b\" (UID: \"04185fdf-5c0c-46c8-8447-bb3225c8409b\") " Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.489809 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.489859 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-service-ca" (OuterVolumeSpecName: "service-ca") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.490068 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.490147 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-config" (OuterVolumeSpecName: "console-config") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.491986 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04185fdf-5c0c-46c8-8447-bb3225c8409b-kube-api-access-wbxdl" (OuterVolumeSpecName: "kube-api-access-wbxdl") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "kube-api-access-wbxdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.492318 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.492904 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "04185fdf-5c0c-46c8-8447-bb3225c8409b" (UID: "04185fdf-5c0c-46c8-8447-bb3225c8409b"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.520040 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-cb4f878bb-p9q64_04185fdf-5c0c-46c8-8447-bb3225c8409b/console/0.log" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.520142 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-cb4f878bb-p9q64" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.520226 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-cb4f878bb-p9q64" event={"ID":"04185fdf-5c0c-46c8-8447-bb3225c8409b","Type":"ContainerDied","Data":"47ff0156181fd904fba5f860238a69e83b358c03fc21424804e3253cad69c94b"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.520317 4813 scope.go:117] "RemoveContainer" containerID="9086b30e50e46c8e5a3e4999f23138fbee4200a6b043bb62e692f238e2dc4ad5" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.522443 4813 generic.go:334] "Generic (PLEG): container finished" podID="39739c2c-937f-4b31-b6f7-9f04c13411c8" containerID="4e975ada3dc45792f523b587ae1716d362d9b8fe6badd812ea32e039f85ffba8" exitCode=0 Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.522511 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/root-account-create-update-ddxhk" event={"ID":"39739c2c-937f-4b31-b6f7-9f04c13411c8","Type":"ContainerDied","Data":"4e975ada3dc45792f523b587ae1716d362d9b8fe6badd812ea32e039f85ffba8"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.522557 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/root-account-create-update-ddxhk" event={"ID":"39739c2c-937f-4b31-b6f7-9f04c13411c8","Type":"ContainerStarted","Data":"2c0e56dbc393ec3478f87e165198caadd311b140ad41e2ab34d9e4a014e66f55"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.524629 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" event={"ID":"ec418c85-33d1-4b27-9b14-bfb444247571","Type":"ContainerStarted","Data":"fea26861c38e5fc5e5111dc59de109ea2221de9ed285ad803ca467d681350b17"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.524666 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" event={"ID":"ec418c85-33d1-4b27-9b14-bfb444247571","Type":"ContainerStarted","Data":"3331444e6d464910b0dd88084c30777e0faae53162639c9a4508e363497f66ac"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.531295 4813 generic.go:334] "Generic (PLEG): container finished" podID="93895b7a-2be8-44b3-92d0-ad24c832633b" containerID="ea3d2dcf6598957acf5286dab06492c127c280b038f630a6ca76deda59bc0c10" exitCode=0 Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.531391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-create-gsq8f" event={"ID":"93895b7a-2be8-44b3-92d0-ad24c832633b","Type":"ContainerDied","Data":"ea3d2dcf6598957acf5286dab06492c127c280b038f630a6ca76deda59bc0c10"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.531445 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-create-gsq8f" event={"ID":"93895b7a-2be8-44b3-92d0-ad24c832633b","Type":"ContainerStarted","Data":"23a66f029d41865bf423681f3c9f53de62af4bfcdb6691b85656fc5046503abb"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.535196 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerStarted","Data":"7562cdbc6b6f41f4410f6a11ac818face44376c5eb1e3ca089351f46786b4f01"} Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.562645 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" podStartSLOduration=1.562626811 podStartE2EDuration="1.562626811s" podCreationTimestamp="2026-03-20 15:59:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 15:59:30.556819203 +0000 UTC m=+1299.979522034" watchObservedRunningTime="2026-03-20 15:59:30.562626811 +0000 UTC m=+1299.985329652" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591289 4813 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591346 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591365 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591382 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbxdl\" (UniqueName: \"kubernetes.io/projected/04185fdf-5c0c-46c8-8447-bb3225c8409b-kube-api-access-wbxdl\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591406 4813 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591423 4813 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-oauth-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.591439 4813 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/04185fdf-5c0c-46c8-8447-bb3225c8409b-console-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.700937 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-cb4f878bb-p9q64"] Mar 20 15:59:30 crc kubenswrapper[4813]: I0320 15:59:30.708461 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-cb4f878bb-p9q64"] Mar 20 15:59:31 crc kubenswrapper[4813]: I0320 15:59:31.278276 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04185fdf-5c0c-46c8-8447-bb3225c8409b" path="/var/lib/kubelet/pods/04185fdf-5c0c-46c8-8447-bb3225c8409b/volumes" Mar 20 15:59:31 crc kubenswrapper[4813]: I0320 15:59:31.545663 4813 generic.go:334] "Generic (PLEG): container finished" podID="ec418c85-33d1-4b27-9b14-bfb444247571" containerID="fea26861c38e5fc5e5111dc59de109ea2221de9ed285ad803ca467d681350b17" exitCode=0 Mar 20 15:59:31 crc kubenswrapper[4813]: I0320 15:59:31.545703 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" event={"ID":"ec418c85-33d1-4b27-9b14-bfb444247571","Type":"ContainerDied","Data":"fea26861c38e5fc5e5111dc59de109ea2221de9ed285ad803ca467d681350b17"} Mar 
20 15:59:31 crc kubenswrapper[4813]: I0320 15:59:31.885080 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.003595 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.013620 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rqvk\" (UniqueName: \"kubernetes.io/projected/93895b7a-2be8-44b3-92d0-ad24c832633b-kube-api-access-8rqvk\") pod \"93895b7a-2be8-44b3-92d0-ad24c832633b\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.013687 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93895b7a-2be8-44b3-92d0-ad24c832633b-operator-scripts\") pod \"93895b7a-2be8-44b3-92d0-ad24c832633b\" (UID: \"93895b7a-2be8-44b3-92d0-ad24c832633b\") " Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.014287 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93895b7a-2be8-44b3-92d0-ad24c832633b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "93895b7a-2be8-44b3-92d0-ad24c832633b" (UID: "93895b7a-2be8-44b3-92d0-ad24c832633b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.024074 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93895b7a-2be8-44b3-92d0-ad24c832633b-kube-api-access-8rqvk" (OuterVolumeSpecName: "kube-api-access-8rqvk") pod "93895b7a-2be8-44b3-92d0-ad24c832633b" (UID: "93895b7a-2be8-44b3-92d0-ad24c832633b"). InnerVolumeSpecName "kube-api-access-8rqvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.115023 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gljql\" (UniqueName: \"kubernetes.io/projected/39739c2c-937f-4b31-b6f7-9f04c13411c8-kube-api-access-gljql\") pod \"39739c2c-937f-4b31-b6f7-9f04c13411c8\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.115250 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39739c2c-937f-4b31-b6f7-9f04c13411c8-operator-scripts\") pod \"39739c2c-937f-4b31-b6f7-9f04c13411c8\" (UID: \"39739c2c-937f-4b31-b6f7-9f04c13411c8\") " Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.115637 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rqvk\" (UniqueName: \"kubernetes.io/projected/93895b7a-2be8-44b3-92d0-ad24c832633b-kube-api-access-8rqvk\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.115658 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93895b7a-2be8-44b3-92d0-ad24c832633b-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.116645 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39739c2c-937f-4b31-b6f7-9f04c13411c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39739c2c-937f-4b31-b6f7-9f04c13411c8" (UID: "39739c2c-937f-4b31-b6f7-9f04c13411c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.118946 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39739c2c-937f-4b31-b6f7-9f04c13411c8-kube-api-access-gljql" (OuterVolumeSpecName: "kube-api-access-gljql") pod "39739c2c-937f-4b31-b6f7-9f04c13411c8" (UID: "39739c2c-937f-4b31-b6f7-9f04c13411c8"). InnerVolumeSpecName "kube-api-access-gljql". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.217415 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39739c2c-937f-4b31-b6f7-9f04c13411c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.217469 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gljql\" (UniqueName: \"kubernetes.io/projected/39739c2c-937f-4b31-b6f7-9f04c13411c8-kube-api-access-gljql\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.557717 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/root-account-create-update-ddxhk" event={"ID":"39739c2c-937f-4b31-b6f7-9f04c13411c8","Type":"ContainerDied","Data":"2c0e56dbc393ec3478f87e165198caadd311b140ad41e2ab34d9e4a014e66f55"} Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.557756 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/root-account-create-update-ddxhk" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.557773 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c0e56dbc393ec3478f87e165198caadd311b140ad41e2ab34d9e4a014e66f55" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.559847 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-create-gsq8f" event={"ID":"93895b7a-2be8-44b3-92d0-ad24c832633b","Type":"ContainerDied","Data":"23a66f029d41865bf423681f3c9f53de62af4bfcdb6691b85656fc5046503abb"} Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.559883 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23a66f029d41865bf423681f3c9f53de62af4bfcdb6691b85656fc5046503abb" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.559954 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-gsq8f" Mar 20 15:59:32 crc kubenswrapper[4813]: I0320 15:59:32.999691 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.063833 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec418c85-33d1-4b27-9b14-bfb444247571-operator-scripts\") pod \"ec418c85-33d1-4b27-9b14-bfb444247571\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.063912 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6rkr\" (UniqueName: \"kubernetes.io/projected/ec418c85-33d1-4b27-9b14-bfb444247571-kube-api-access-j6rkr\") pod \"ec418c85-33d1-4b27-9b14-bfb444247571\" (UID: \"ec418c85-33d1-4b27-9b14-bfb444247571\") " Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.068681 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec418c85-33d1-4b27-9b14-bfb444247571-kube-api-access-j6rkr" (OuterVolumeSpecName: "kube-api-access-j6rkr") pod "ec418c85-33d1-4b27-9b14-bfb444247571" (UID: "ec418c85-33d1-4b27-9b14-bfb444247571"). InnerVolumeSpecName "kube-api-access-j6rkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.076858 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec418c85-33d1-4b27-9b14-bfb444247571-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ec418c85-33d1-4b27-9b14-bfb444247571" (UID: "ec418c85-33d1-4b27-9b14-bfb444247571"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.165569 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ec418c85-33d1-4b27-9b14-bfb444247571-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.165617 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6rkr\" (UniqueName: \"kubernetes.io/projected/ec418c85-33d1-4b27-9b14-bfb444247571-kube-api-access-j6rkr\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.572413 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerStarted","Data":"6b82cdd6d07c8900a33203e2ad67b41b03a469c94cfe5d258d2803c589477a98"} Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.577454 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" event={"ID":"ec418c85-33d1-4b27-9b14-bfb444247571","Type":"ContainerDied","Data":"3331444e6d464910b0dd88084c30777e0faae53162639c9a4508e363497f66ac"} Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.577546 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3331444e6d464910b0dd88084c30777e0faae53162639c9a4508e363497f66ac" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.577626 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-e994-account-create-update-45jsg" Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.842618 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 15:59:33 crc kubenswrapper[4813]: I0320 15:59:33.842682 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 15:59:36 crc kubenswrapper[4813]: I0320 15:59:36.604888 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerStarted","Data":"1e80ed77297d8d7499392552695bba270ccf1f45f6d4d9d10a0a074e2c58abd5"} Mar 20 15:59:36 crc kubenswrapper[4813]: I0320 15:59:36.658285 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/prometheus-metric-storage-0" podStartSLOduration=2.763031455 podStartE2EDuration="46.658254055s" podCreationTimestamp="2026-03-20 15:58:50 +0000 UTC" firstStartedPulling="2026-03-20 15:58:52.193082765 +0000 UTC m=+1261.615785606" lastFinishedPulling="2026-03-20 15:59:36.088305365 +0000 UTC m=+1305.511008206" observedRunningTime="2026-03-20 15:59:36.640851383 +0000 UTC m=+1306.063554264" watchObservedRunningTime="2026-03-20 15:59:36.658254055 +0000 UTC m=+1306.080956936" Mar 20 15:59:41 crc kubenswrapper[4813]: I0320 15:59:41.579944 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:49 crc kubenswrapper[4813]: I0320 15:59:49.723181 4813 generic.go:334] "Generic (PLEG): container finished" podID="ec1149a7-ab74-4cc6-9e54-66a6136d41ac" containerID="ba6e108fef263839b5a13a586ea3ceb79329926512c3eea6a5fc690ba681553f" exitCode=0 Mar 20 15:59:49 crc kubenswrapper[4813]: I0320 15:59:49.723277 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"ec1149a7-ab74-4cc6-9e54-66a6136d41ac","Type":"ContainerDied","Data":"ba6e108fef263839b5a13a586ea3ceb79329926512c3eea6a5fc690ba681553f"} Mar 20 15:59:50 crc kubenswrapper[4813]: I0320 15:59:50.734257 4813 generic.go:334] "Generic (PLEG): container finished" podID="0f7bff9e-8c2a-478e-a30e-55d5be1df762" containerID="38ea0653d9a0d0d96f21c44cd996c4ce74d06c876744ddd7325dd462bc0ec30a" exitCode=0 Mar 20 15:59:50 crc kubenswrapper[4813]: I0320 15:59:50.734319 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"0f7bff9e-8c2a-478e-a30e-55d5be1df762","Type":"ContainerDied","Data":"38ea0653d9a0d0d96f21c44cd996c4ce74d06c876744ddd7325dd462bc0ec30a"} Mar 20 15:59:50 crc kubenswrapper[4813]: I0320 15:59:50.738594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"ec1149a7-ab74-4cc6-9e54-66a6136d41ac","Type":"ContainerStarted","Data":"ce698c44a477574b29240cea5a3cef822d2fd2369ffbd346b095c919ae041cfc"} Mar 20 15:59:50 crc kubenswrapper[4813]: I0320 15:59:50.738997 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 15:59:50 crc kubenswrapper[4813]: I0320 15:59:50.805269 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/rabbitmq-server-0" podStartSLOduration=37.556964518 podStartE2EDuration="1m5.805251496s" podCreationTimestamp="2026-03-20 15:58:45 +0000 UTC" firstStartedPulling="2026-03-20 15:58:47.424501674 +0000 UTC m=+1256.847204515" lastFinishedPulling="2026-03-20 15:59:15.672788652 +0000 UTC m=+1285.095491493" observedRunningTime="2026-03-20 15:59:50.804926248 +0000 UTC m=+1320.227629089" watchObservedRunningTime="2026-03-20 15:59:50.805251496 +0000 UTC m=+1320.227954347" Mar 20 15:59:51 crc kubenswrapper[4813]: I0320 15:59:51.579606 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:51 crc kubenswrapper[4813]: I0320 15:59:51.592363 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:51 crc kubenswrapper[4813]: I0320 15:59:51.748837 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"0f7bff9e-8c2a-478e-a30e-55d5be1df762","Type":"ContainerStarted","Data":"05a23873520fff3bba9f9e8d51659feec3e242b7b6a8bd4aa911088f9089790c"} Mar 20 15:59:51 crc kubenswrapper[4813]: I0320 15:59:51.749534 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 15:59:51 crc kubenswrapper[4813]: I0320 15:59:51.750706 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:51 crc kubenswrapper[4813]: I0320 15:59:51.780300 4813 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" podStartSLOduration=37.872049467 podStartE2EDuration="1m5.780281187s" podCreationTimestamp="2026-03-20 15:58:46 +0000 UTC" firstStartedPulling="2026-03-20 15:58:48.048735255 +0000 UTC m=+1257.471438086" lastFinishedPulling="2026-03-20 15:59:15.956966965 +0000 UTC m=+1285.379669806" observedRunningTime="2026-03-20 15:59:51.771279573 +0000 UTC m=+1321.193982424" watchObservedRunningTime="2026-03-20 15:59:51.780281187 +0000 UTC m=+1321.202984028" Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.298598 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.299103 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="prometheus" containerID="cri-o://7562cdbc6b6f41f4410f6a11ac818face44376c5eb1e3ca089351f46786b4f01" gracePeriod=600 Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.299180 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="thanos-sidecar" containerID="cri-o://1e80ed77297d8d7499392552695bba270ccf1f45f6d4d9d10a0a074e2c58abd5" gracePeriod=600 Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.299185 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="config-reloader" containerID="cri-o://6b82cdd6d07c8900a33203e2ad67b41b03a469c94cfe5d258d2803c589477a98" gracePeriod=600 Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.771598 4813 generic.go:334] "Generic (PLEG): container finished" podID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerID="1e80ed77297d8d7499392552695bba270ccf1f45f6d4d9d10a0a074e2c58abd5" exitCode=0 Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.771647 4813 generic.go:334] "Generic (PLEG): container finished" podID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerID="6b82cdd6d07c8900a33203e2ad67b41b03a469c94cfe5d258d2803c589477a98" exitCode=0 Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.771654 4813 generic.go:334] "Generic (PLEG): container finished" podID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerID="7562cdbc6b6f41f4410f6a11ac818face44376c5eb1e3ca089351f46786b4f01" exitCode=0 Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.771697 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerDied","Data":"1e80ed77297d8d7499392552695bba270ccf1f45f6d4d9d10a0a074e2c58abd5"} Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.771743 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerDied","Data":"6b82cdd6d07c8900a33203e2ad67b41b03a469c94cfe5d258d2803c589477a98"} Mar 20 15:59:54 crc kubenswrapper[4813]: I0320 15:59:54.771754 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" 
event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerDied","Data":"7562cdbc6b6f41f4410f6a11ac818face44376c5eb1e3ca089351f46786b4f01"} Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.192130 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.211831 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-1\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.211909 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-0\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.211985 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-2\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212044 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config-out\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212089 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-thanos-prometheus-http-client-file\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212126 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-web-config\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212198 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx829\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-kube-api-access-lx829\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212243 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212380 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212398 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212442 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212534 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-tls-assets\") pod \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\" (UID: \"c6328bb8-f6a3-476c-a58c-d9beec93b7ff\") " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212642 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.212975 4813 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.213007 4813 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.213026 4813 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.229076 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "tls-assets". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.234637 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-kube-api-access-lx829" (OuterVolumeSpecName: "kube-api-access-lx829") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "kube-api-access-lx829". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.234743 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.246677 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config" (OuterVolumeSpecName: "config") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.251294 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config-out" (OuterVolumeSpecName: "config-out") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.266800 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e". PluginName "kubernetes.io/csi", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.272324 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-web-config" (OuterVolumeSpecName: "web-config") pod "c6328bb8-f6a3-476c-a58c-d9beec93b7ff" (UID: "c6328bb8-f6a3-476c-a58c-d9beec93b7ff"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314318 4813 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-tls-assets\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314365 4813 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config-out\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314379 4813 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314394 4813 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-web-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314407 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx829\" (UniqueName: \"kubernetes.io/projected/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-kube-api-access-lx829\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314419 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6328bb8-f6a3-476c-a58c-d9beec93b7ff-config\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.314446 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") on node \"crc\" " Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.330929 4813 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.331095 4813 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e") on node "crc" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.416181 4813 reconciler_common.go:293] "Volume detached for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") on node \"crc\" DevicePath \"\"" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.783455 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"c6328bb8-f6a3-476c-a58c-d9beec93b7ff","Type":"ContainerDied","Data":"f277d5bbe5949676cb3413c885b785c61d42215f3d7e69fb0907158982a2d3e2"} Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.783849 4813 scope.go:117] "RemoveContainer" containerID="1e80ed77297d8d7499392552695bba270ccf1f45f6d4d9d10a0a074e2c58abd5" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.783661 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.813216 4813 scope.go:117] "RemoveContainer" containerID="6b82cdd6d07c8900a33203e2ad67b41b03a469c94cfe5d258d2803c589477a98" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.814729 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.820953 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.844701 4813 scope.go:117] "RemoveContainer" containerID="7562cdbc6b6f41f4410f6a11ac818face44376c5eb1e3ca089351f46786b4f01" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.848764 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.849321 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec418c85-33d1-4b27-9b14-bfb444247571" containerName="mariadb-account-create-update" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.849416 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec418c85-33d1-4b27-9b14-bfb444247571" containerName="mariadb-account-create-update" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.849538 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="prometheus" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.849626 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="prometheus" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.849726 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="thanos-sidecar" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.849810 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="thanos-sidecar" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.849898 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="config-reloader" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.849979 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="config-reloader" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.850078 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04185fdf-5c0c-46c8-8447-bb3225c8409b" containerName="console" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.850165 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="04185fdf-5c0c-46c8-8447-bb3225c8409b" containerName="console" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.850250 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39739c2c-937f-4b31-b6f7-9f04c13411c8" containerName="mariadb-account-create-update" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.850354 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="39739c2c-937f-4b31-b6f7-9f04c13411c8" containerName="mariadb-account-create-update" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.850457 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93895b7a-2be8-44b3-92d0-ad24c832633b" 
containerName="mariadb-database-create" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.850562 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="93895b7a-2be8-44b3-92d0-ad24c832633b" containerName="mariadb-database-create" Mar 20 15:59:55 crc kubenswrapper[4813]: E0320 15:59:55.850648 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="init-config-reloader" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.850730 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="init-config-reloader" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.850974 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="prometheus" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.851085 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="config-reloader" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.851164 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" containerName="thanos-sidecar" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.851262 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec418c85-33d1-4b27-9b14-bfb444247571" containerName="mariadb-account-create-update" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.851348 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="39739c2c-937f-4b31-b6f7-9f04c13411c8" containerName="mariadb-account-create-update" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.851427 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="04185fdf-5c0c-46c8-8447-bb3225c8409b" containerName="console" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.851533 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="93895b7a-2be8-44b3-92d0-ad24c832633b" containerName="mariadb-database-create" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.853283 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.859041 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-metric-storage-prometheus-svc" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.859789 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.860032 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-1" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.860584 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-prometheus-dockercfg-8v6vp" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.860740 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-2" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.860990 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-0" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.864165 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.865979 4813 scope.go:117] "RemoveContainer" containerID="2cbb755d773fdf2443169a18bd151cf3e5bf2de6974deb2136e6980ba8c2b374" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.868936 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.888554 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-web-config" Mar 20 15:59:55 crc kubenswrapper[4813]: I0320 15:59:55.917585 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-tls-assets-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026623 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026721 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d2f088cf-8304-46b5-af82-96110c742638-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026751 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " 
pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026787 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026818 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns56j\" (UniqueName: \"kubernetes.io/projected/d2f088cf-8304-46b5-af82-96110c742638-kube-api-access-ns56j\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026853 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-config\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026911 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026940 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.026981 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.027021 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " 
pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.027045 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.027075 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d2f088cf-8304-46b5-af82-96110c742638-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.128389 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d2f088cf-8304-46b5-af82-96110c742638-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.128686 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.128773 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.128851 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns56j\" (UniqueName: \"kubernetes.io/projected/d2f088cf-8304-46b5-af82-96110c742638-kube-api-access-ns56j\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.128927 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-config\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129003 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129119 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129219 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129312 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129408 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129532 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129640 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d2f088cf-8304-46b5-af82-96110c742638-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.129749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.130851 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.132128 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.133820 4813 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.133925 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/504962cc396d8162f5a974b64cf1cdd7337fdfdd024497e20b3aeb0802e6d7eb/globalmount\"" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.134126 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/d2f088cf-8304-46b5-af82-96110c742638-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.136245 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.136985 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d2f088cf-8304-46b5-af82-96110c742638-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.137637 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.138384 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d2f088cf-8304-46b5-af82-96110c742638-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.138460 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-config\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " 
pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.138750 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.139861 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.146088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/d2f088cf-8304-46b5-af82-96110c742638-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.155871 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns56j\" (UniqueName: \"kubernetes.io/projected/d2f088cf-8304-46b5-af82-96110c742638-kube-api-access-ns56j\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.169380 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d51a5d9e-7451-4c45-8e0a-d441c7d3209e\") pod \"prometheus-metric-storage-0\" (UID: \"d2f088cf-8304-46b5-af82-96110c742638\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.195125 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.631998 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Mar 20 15:59:56 crc kubenswrapper[4813]: I0320 15:59:56.791092 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"d2f088cf-8304-46b5-af82-96110c742638","Type":"ContainerStarted","Data":"c3142e183a22e0a6730cb325f53277848dde6eb56bda4c021237bf195eafd10f"} Mar 20 15:59:57 crc kubenswrapper[4813]: I0320 15:59:57.274388 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6328bb8-f6a3-476c-a58c-d9beec93b7ff" path="/var/lib/kubelet/pods/c6328bb8-f6a3-476c-a58c-d9beec93b7ff/volumes" Mar 20 15:59:59 crc kubenswrapper[4813]: I0320 15:59:59.820090 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"d2f088cf-8304-46b5-af82-96110c742638","Type":"ContainerStarted","Data":"4c68b0e08a8113a8119b43ac4922ac156aedc074eb2abd5d66734638af860022"} Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.135715 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567040-5rzq4"] Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.136808 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.140802 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.141823 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.146304 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs"] Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.148017 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.160313 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567040-5rzq4"] Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.162687 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.162959 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.165688 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs"] Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.166019 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.290525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhqwn\" (UniqueName: \"kubernetes.io/projected/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-kube-api-access-dhqwn\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.290594 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-config-volume\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.290626 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-secret-volume\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.290662 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4wzg\" (UniqueName: \"kubernetes.io/projected/f979cdb9-a7bf-4465-88f6-82f30ac60145-kube-api-access-r4wzg\") pod \"auto-csr-approver-29567040-5rzq4\" (UID: \"f979cdb9-a7bf-4465-88f6-82f30ac60145\") " pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.391928 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4wzg\" (UniqueName: \"kubernetes.io/projected/f979cdb9-a7bf-4465-88f6-82f30ac60145-kube-api-access-r4wzg\") pod \"auto-csr-approver-29567040-5rzq4\" (UID: \"f979cdb9-a7bf-4465-88f6-82f30ac60145\") " pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.392124 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhqwn\" (UniqueName: \"kubernetes.io/projected/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-kube-api-access-dhqwn\") pod \"collect-profiles-29567040-9bzhs\" (UID: 
\"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.392207 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-config-volume\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.392280 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-secret-volume\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.394687 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-config-volume\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.400332 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-secret-volume\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.414740 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhqwn\" (UniqueName: \"kubernetes.io/projected/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-kube-api-access-dhqwn\") pod \"collect-profiles-29567040-9bzhs\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.429914 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4wzg\" (UniqueName: \"kubernetes.io/projected/f979cdb9-a7bf-4465-88f6-82f30ac60145-kube-api-access-r4wzg\") pod \"auto-csr-approver-29567040-5rzq4\" (UID: \"f979cdb9-a7bf-4465-88f6-82f30ac60145\") " pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.462285 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.471323 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:00 crc kubenswrapper[4813]: I0320 16:00:00.929802 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567040-5rzq4"] Mar 20 16:00:00 crc kubenswrapper[4813]: W0320 16:00:00.934953 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf979cdb9_a7bf_4465_88f6_82f30ac60145.slice/crio-873fcece996bd0e828de44819589f2bdc37dafd87ffbb88637d2fda6da0f29bf WatchSource:0}: Error finding container 873fcece996bd0e828de44819589f2bdc37dafd87ffbb88637d2fda6da0f29bf: Status 404 returned error can't find the container with id 873fcece996bd0e828de44819589f2bdc37dafd87ffbb88637d2fda6da0f29bf Mar 20 16:00:01 crc kubenswrapper[4813]: I0320 16:00:01.048425 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs"] Mar 20 16:00:01 crc kubenswrapper[4813]: W0320 16:00:01.050442 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0568d2a5_e8f7_4cfd_9035_b441b7d06c17.slice/crio-294bf45e53d6d14028ea31ba79b8503d88bcadf751dbc42bbee76a692f9745ae WatchSource:0}: Error finding container 294bf45e53d6d14028ea31ba79b8503d88bcadf751dbc42bbee76a692f9745ae: Status 404 returned error can't find the container with id 294bf45e53d6d14028ea31ba79b8503d88bcadf751dbc42bbee76a692f9745ae Mar 20 16:00:01 crc kubenswrapper[4813]: I0320 16:00:01.838268 4813 generic.go:334] "Generic (PLEG): container finished" podID="0568d2a5-e8f7-4cfd-9035-b441b7d06c17" containerID="3d88b710a67fc0c8dec790dc051da5b1e62a11dd2dc04592d8055d4552927cce" exitCode=0 Mar 20 16:00:01 crc kubenswrapper[4813]: I0320 16:00:01.838403 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" event={"ID":"0568d2a5-e8f7-4cfd-9035-b441b7d06c17","Type":"ContainerDied","Data":"3d88b710a67fc0c8dec790dc051da5b1e62a11dd2dc04592d8055d4552927cce"} Mar 20 16:00:01 crc kubenswrapper[4813]: I0320 16:00:01.838657 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" event={"ID":"0568d2a5-e8f7-4cfd-9035-b441b7d06c17","Type":"ContainerStarted","Data":"294bf45e53d6d14028ea31ba79b8503d88bcadf751dbc42bbee76a692f9745ae"} Mar 20 16:00:01 crc kubenswrapper[4813]: I0320 16:00:01.841250 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" event={"ID":"f979cdb9-a7bf-4465-88f6-82f30ac60145","Type":"ContainerStarted","Data":"873fcece996bd0e828de44819589f2bdc37dafd87ffbb88637d2fda6da0f29bf"} Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.225605 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.342394 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-config-volume\") pod \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.342888 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-secret-volume\") pod \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.343119 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhqwn\" (UniqueName: \"kubernetes.io/projected/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-kube-api-access-dhqwn\") pod \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\" (UID: \"0568d2a5-e8f7-4cfd-9035-b441b7d06c17\") " Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.343186 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-config-volume" (OuterVolumeSpecName: "config-volume") pod "0568d2a5-e8f7-4cfd-9035-b441b7d06c17" (UID: "0568d2a5-e8f7-4cfd-9035-b441b7d06c17"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.343601 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.351275 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-kube-api-access-dhqwn" (OuterVolumeSpecName: "kube-api-access-dhqwn") pod "0568d2a5-e8f7-4cfd-9035-b441b7d06c17" (UID: "0568d2a5-e8f7-4cfd-9035-b441b7d06c17"). InnerVolumeSpecName "kube-api-access-dhqwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.352658 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0568d2a5-e8f7-4cfd-9035-b441b7d06c17" (UID: "0568d2a5-e8f7-4cfd-9035-b441b7d06c17"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.445853 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.445887 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhqwn\" (UniqueName: \"kubernetes.io/projected/0568d2a5-e8f7-4cfd-9035-b441b7d06c17-kube-api-access-dhqwn\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.842869 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.842924 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.860624 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" event={"ID":"0568d2a5-e8f7-4cfd-9035-b441b7d06c17","Type":"ContainerDied","Data":"294bf45e53d6d14028ea31ba79b8503d88bcadf751dbc42bbee76a692f9745ae"} Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.860664 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="294bf45e53d6d14028ea31ba79b8503d88bcadf751dbc42bbee76a692f9745ae" Mar 20 16:00:03 crc kubenswrapper[4813]: I0320 16:00:03.860662 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567040-9bzhs" Mar 20 16:00:05 crc kubenswrapper[4813]: I0320 16:00:05.884793 4813 generic.go:334] "Generic (PLEG): container finished" podID="d2f088cf-8304-46b5-af82-96110c742638" containerID="4c68b0e08a8113a8119b43ac4922ac156aedc074eb2abd5d66734638af860022" exitCode=0 Mar 20 16:00:05 crc kubenswrapper[4813]: I0320 16:00:05.885218 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"d2f088cf-8304-46b5-af82-96110c742638","Type":"ContainerDied","Data":"4c68b0e08a8113a8119b43ac4922ac156aedc074eb2abd5d66734638af860022"} Mar 20 16:00:06 crc kubenswrapper[4813]: I0320 16:00:06.894629 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"d2f088cf-8304-46b5-af82-96110c742638","Type":"ContainerStarted","Data":"fc1c4cd05b83ddf951b846cf8e69251af97b25ebc4e569ee971dcfe94841b3e9"} Mar 20 16:00:06 crc kubenswrapper[4813]: I0320 16:00:06.922734 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/rabbitmq-server-0" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.538778 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.555608 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-db-sync-m4lj9"] Mar 20 16:00:07 crc kubenswrapper[4813]: E0320 16:00:07.555938 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0568d2a5-e8f7-4cfd-9035-b441b7d06c17" containerName="collect-profiles" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.555955 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0568d2a5-e8f7-4cfd-9035-b441b7d06c17" containerName="collect-profiles" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.556109 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0568d2a5-e8f7-4cfd-9035-b441b7d06c17" containerName="collect-profiles" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.556654 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.560327 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.560360 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.561313 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.561441 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-zpjf7" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.572220 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-m4lj9"] Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.717818 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8f45\" (UniqueName: \"kubernetes.io/projected/538d338b-7770-4fd4-95b2-1fe7653e0159-kube-api-access-z8f45\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.718131 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-combined-ca-bundle\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.718168 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-config-data\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.819644 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-combined-ca-bundle\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.819722 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-config-data\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.819936 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8f45\" (UniqueName: \"kubernetes.io/projected/538d338b-7770-4fd4-95b2-1fe7653e0159-kube-api-access-z8f45\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.826257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-combined-ca-bundle\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.837341 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-config-data\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.840116 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8f45\" (UniqueName: \"kubernetes.io/projected/538d338b-7770-4fd4-95b2-1fe7653e0159-kube-api-access-z8f45\") pod \"keystone-db-sync-m4lj9\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:07 crc kubenswrapper[4813]: I0320 16:00:07.871386 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:08 crc kubenswrapper[4813]: I0320 16:00:08.414578 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-m4lj9"] Mar 20 16:00:08 crc kubenswrapper[4813]: I0320 16:00:08.919931 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"d2f088cf-8304-46b5-af82-96110c742638","Type":"ContainerStarted","Data":"b09d6f8376e8e4e4c5f770aa6ba95070f7f1b220084ed35df276737c333f0b50"} Mar 20 16:00:08 crc kubenswrapper[4813]: I0320 16:00:08.920178 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"d2f088cf-8304-46b5-af82-96110c742638","Type":"ContainerStarted","Data":"30d13b31859159966eb05cd83a97138be12674986a595cc749314a1b4551dfa0"} Mar 20 16:00:08 crc kubenswrapper[4813]: I0320 16:00:08.921820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" event={"ID":"538d338b-7770-4fd4-95b2-1fe7653e0159","Type":"ContainerStarted","Data":"b8b9d5a668e12064aaffd9d0c0d2663c2cab8e6f9293bcee6dd226cbd1504c97"} Mar 20 16:00:08 crc kubenswrapper[4813]: I0320 16:00:08.946957 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/prometheus-metric-storage-0" podStartSLOduration=13.946934673 podStartE2EDuration="13.946934673s" podCreationTimestamp="2026-03-20 15:59:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:00:08.943679155 +0000 UTC m=+1338.366382016" watchObservedRunningTime="2026-03-20 16:00:08.946934673 +0000 UTC m=+1338.369637524" Mar 20 16:00:11 crc kubenswrapper[4813]: I0320 16:00:11.196850 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 16:00:11 crc kubenswrapper[4813]: I0320 16:00:11.197152 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 16:00:11 crc kubenswrapper[4813]: I0320 16:00:11.203207 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 16:00:11 crc kubenswrapper[4813]: I0320 16:00:11.947302 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/prometheus-metric-storage-0" Mar 20 16:00:16 crc kubenswrapper[4813]: I0320 16:00:16.981448 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" event={"ID":"538d338b-7770-4fd4-95b2-1fe7653e0159","Type":"ContainerStarted","Data":"da9612f0cf84d7e69de5fdca75b03e4d1d0366798953ba5e2be48a7317eb04f4"} Mar 20 16:00:16 crc kubenswrapper[4813]: I0320 16:00:16.983573 4813 generic.go:334] "Generic (PLEG): container finished" podID="f979cdb9-a7bf-4465-88f6-82f30ac60145" containerID="6900f2425bdb89293e31e98cc8fc49b5c6276b49e10ab5c808a3e94da4cd9b6c" exitCode=0 Mar 20 16:00:16 crc kubenswrapper[4813]: I0320 16:00:16.983602 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" event={"ID":"f979cdb9-a7bf-4465-88f6-82f30ac60145","Type":"ContainerDied","Data":"6900f2425bdb89293e31e98cc8fc49b5c6276b49e10ab5c808a3e94da4cd9b6c"} Mar 20 16:00:17 crc kubenswrapper[4813]: I0320 16:00:17.003229 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" podStartSLOduration=2.495451572 podStartE2EDuration="10.003213155s" podCreationTimestamp="2026-03-20 16:00:07 +0000 UTC" firstStartedPulling="2026-03-20 16:00:08.424882121 +0000 UTC m=+1337.847584982" lastFinishedPulling="2026-03-20 16:00:15.932643724 +0000 UTC m=+1345.355346565" observedRunningTime="2026-03-20 16:00:17.00008273 +0000 UTC m=+1346.422785581" watchObservedRunningTime="2026-03-20 16:00:17.003213155 +0000 UTC m=+1346.425915996" Mar 20 16:00:18 crc kubenswrapper[4813]: I0320 16:00:18.383553 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:18 crc kubenswrapper[4813]: I0320 16:00:18.494608 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4wzg\" (UniqueName: \"kubernetes.io/projected/f979cdb9-a7bf-4465-88f6-82f30ac60145-kube-api-access-r4wzg\") pod \"f979cdb9-a7bf-4465-88f6-82f30ac60145\" (UID: \"f979cdb9-a7bf-4465-88f6-82f30ac60145\") " Mar 20 16:00:18 crc kubenswrapper[4813]: I0320 16:00:18.502569 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f979cdb9-a7bf-4465-88f6-82f30ac60145-kube-api-access-r4wzg" (OuterVolumeSpecName: "kube-api-access-r4wzg") pod "f979cdb9-a7bf-4465-88f6-82f30ac60145" (UID: "f979cdb9-a7bf-4465-88f6-82f30ac60145"). InnerVolumeSpecName "kube-api-access-r4wzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:00:18 crc kubenswrapper[4813]: I0320 16:00:18.596129 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4wzg\" (UniqueName: \"kubernetes.io/projected/f979cdb9-a7bf-4465-88f6-82f30ac60145-kube-api-access-r4wzg\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:19 crc kubenswrapper[4813]: I0320 16:00:19.003362 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" event={"ID":"f979cdb9-a7bf-4465-88f6-82f30ac60145","Type":"ContainerDied","Data":"873fcece996bd0e828de44819589f2bdc37dafd87ffbb88637d2fda6da0f29bf"} Mar 20 16:00:19 crc kubenswrapper[4813]: I0320 16:00:19.003741 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="873fcece996bd0e828de44819589f2bdc37dafd87ffbb88637d2fda6da0f29bf" Mar 20 16:00:19 crc kubenswrapper[4813]: I0320 16:00:19.003574 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567040-5rzq4" Mar 20 16:00:19 crc kubenswrapper[4813]: I0320 16:00:19.455397 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567034-tvrfq"] Mar 20 16:00:19 crc kubenswrapper[4813]: I0320 16:00:19.465460 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567034-tvrfq"] Mar 20 16:00:20 crc kubenswrapper[4813]: I0320 16:00:20.013261 4813 generic.go:334] "Generic (PLEG): container finished" podID="538d338b-7770-4fd4-95b2-1fe7653e0159" containerID="da9612f0cf84d7e69de5fdca75b03e4d1d0366798953ba5e2be48a7317eb04f4" exitCode=0 Mar 20 16:00:20 crc kubenswrapper[4813]: I0320 16:00:20.013321 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" event={"ID":"538d338b-7770-4fd4-95b2-1fe7653e0159","Type":"ContainerDied","Data":"da9612f0cf84d7e69de5fdca75b03e4d1d0366798953ba5e2be48a7317eb04f4"} Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.282939 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bde898e2-d1f5-45dc-ba44-1cc4620152dc" path="/var/lib/kubelet/pods/bde898e2-d1f5-45dc-ba44-1cc4620152dc/volumes" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.392264 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.556936 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-config-data\") pod \"538d338b-7770-4fd4-95b2-1fe7653e0159\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.557073 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8f45\" (UniqueName: \"kubernetes.io/projected/538d338b-7770-4fd4-95b2-1fe7653e0159-kube-api-access-z8f45\") pod \"538d338b-7770-4fd4-95b2-1fe7653e0159\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.557172 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-combined-ca-bundle\") pod \"538d338b-7770-4fd4-95b2-1fe7653e0159\" (UID: \"538d338b-7770-4fd4-95b2-1fe7653e0159\") " Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.565801 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/538d338b-7770-4fd4-95b2-1fe7653e0159-kube-api-access-z8f45" (OuterVolumeSpecName: "kube-api-access-z8f45") pod "538d338b-7770-4fd4-95b2-1fe7653e0159" (UID: "538d338b-7770-4fd4-95b2-1fe7653e0159"). InnerVolumeSpecName "kube-api-access-z8f45". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.577738 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "538d338b-7770-4fd4-95b2-1fe7653e0159" (UID: "538d338b-7770-4fd4-95b2-1fe7653e0159"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.592310 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-config-data" (OuterVolumeSpecName: "config-data") pod "538d338b-7770-4fd4-95b2-1fe7653e0159" (UID: "538d338b-7770-4fd4-95b2-1fe7653e0159"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.659222 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.659265 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8f45\" (UniqueName: \"kubernetes.io/projected/538d338b-7770-4fd4-95b2-1fe7653e0159-kube-api-access-z8f45\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:21 crc kubenswrapper[4813]: I0320 16:00:21.659282 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/538d338b-7770-4fd4-95b2-1fe7653e0159-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.033241 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" event={"ID":"538d338b-7770-4fd4-95b2-1fe7653e0159","Type":"ContainerDied","Data":"b8b9d5a668e12064aaffd9d0c0d2663c2cab8e6f9293bcee6dd226cbd1504c97"} Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.033297 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8b9d5a668e12064aaffd9d0c0d2663c2cab8e6f9293bcee6dd226cbd1504c97" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.033301 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-m4lj9" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.255454 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-4nm2s"] Mar 20 16:00:22 crc kubenswrapper[4813]: E0320 16:00:22.255808 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="538d338b-7770-4fd4-95b2-1fe7653e0159" containerName="keystone-db-sync" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.255820 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="538d338b-7770-4fd4-95b2-1fe7653e0159" containerName="keystone-db-sync" Mar 20 16:00:22 crc kubenswrapper[4813]: E0320 16:00:22.255830 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f979cdb9-a7bf-4465-88f6-82f30ac60145" containerName="oc" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.255837 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f979cdb9-a7bf-4465-88f6-82f30ac60145" containerName="oc" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.256009 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f979cdb9-a7bf-4465-88f6-82f30ac60145" containerName="oc" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.256040 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="538d338b-7770-4fd4-95b2-1fe7653e0159" containerName="keystone-db-sync" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.256883 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.261911 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.262198 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"osp-secret" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.262373 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-zpjf7" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.262581 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.262760 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.283130 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-4nm2s"] Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.370801 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-combined-ca-bundle\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.370883 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tl4p\" (UniqueName: \"kubernetes.io/projected/8edd195e-2956-40af-862d-6ebf3e48660d-kube-api-access-6tl4p\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.371071 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-credential-keys\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.371112 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-fernet-keys\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.371132 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-config-data\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.371165 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-scripts\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " 
pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.391628 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.393361 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.398218 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.398370 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.415631 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.472900 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tl4p\" (UniqueName: \"kubernetes.io/projected/8edd195e-2956-40af-862d-6ebf3e48660d-kube-api-access-6tl4p\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.473001 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-credential-keys\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.473032 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-config-data\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.473052 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-fernet-keys\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.473079 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-scripts\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.473167 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-combined-ca-bundle\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.483450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-credential-keys\") pod \"keystone-bootstrap-4nm2s\" (UID: 
\"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.487123 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-combined-ca-bundle\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.494387 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tl4p\" (UniqueName: \"kubernetes.io/projected/8edd195e-2956-40af-862d-6ebf3e48660d-kube-api-access-6tl4p\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.494976 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-fernet-keys\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.498113 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-config-data\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.499427 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-scripts\") pod \"keystone-bootstrap-4nm2s\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.574420 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-scripts\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.574504 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-run-httpd\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.574968 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-config-data\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.575058 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc 
kubenswrapper[4813]: I0320 16:00:22.575121 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rzkp\" (UniqueName: \"kubernetes.io/projected/ff7919ef-56e4-4690-a1d2-105f6bf182b6-kube-api-access-4rzkp\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.575160 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-log-httpd\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.575222 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.579745 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.676349 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rzkp\" (UniqueName: \"kubernetes.io/projected/ff7919ef-56e4-4690-a1d2-105f6bf182b6-kube-api-access-4rzkp\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.676638 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-log-httpd\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.676661 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.676706 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-scripts\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.676729 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-run-httpd\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.676778 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-config-data\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc 
kubenswrapper[4813]: I0320 16:00:22.676803 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.677634 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-run-httpd\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.677882 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-log-httpd\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.684435 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-config-data\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.689470 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.704233 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-scripts\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.704901 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:22 crc kubenswrapper[4813]: I0320 16:00:22.709520 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rzkp\" (UniqueName: \"kubernetes.io/projected/ff7919ef-56e4-4690-a1d2-105f6bf182b6-kube-api-access-4rzkp\") pod \"ceilometer-0\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:23 crc kubenswrapper[4813]: I0320 16:00:23.007556 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:23 crc kubenswrapper[4813]: I0320 16:00:23.031341 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-4nm2s"] Mar 20 16:00:23 crc kubenswrapper[4813]: I0320 16:00:23.266359 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:23 crc kubenswrapper[4813]: W0320 16:00:23.269688 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff7919ef_56e4_4690_a1d2_105f6bf182b6.slice/crio-5713f0624afccea51fc921014bdf3e16e5e4faa37de39650f519ecc54012fcfb WatchSource:0}: Error finding container 5713f0624afccea51fc921014bdf3e16e5e4faa37de39650f519ecc54012fcfb: Status 404 returned error can't find the container with id 5713f0624afccea51fc921014bdf3e16e5e4faa37de39650f519ecc54012fcfb Mar 20 16:00:24 crc kubenswrapper[4813]: I0320 16:00:24.049905 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" event={"ID":"8edd195e-2956-40af-862d-6ebf3e48660d","Type":"ContainerStarted","Data":"961067924eefb9dd1fdaf9b2e74736bebac11be188943e28b99c9f11f67e03b7"} Mar 20 16:00:24 crc kubenswrapper[4813]: I0320 16:00:24.050415 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" event={"ID":"8edd195e-2956-40af-862d-6ebf3e48660d","Type":"ContainerStarted","Data":"6d447734c50261b730fb5f9ae641f0c0c0af85ad022dd5afcaf30f2d88310cfa"} Mar 20 16:00:24 crc kubenswrapper[4813]: I0320 16:00:24.051144 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerStarted","Data":"5713f0624afccea51fc921014bdf3e16e5e4faa37de39650f519ecc54012fcfb"} Mar 20 16:00:24 crc kubenswrapper[4813]: I0320 16:00:24.574203 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" podStartSLOduration=2.574182151 podStartE2EDuration="2.574182151s" podCreationTimestamp="2026-03-20 16:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:00:24.090977912 +0000 UTC m=+1353.513680753" watchObservedRunningTime="2026-03-20 16:00:24.574182151 +0000 UTC m=+1353.996884982" Mar 20 16:00:24 crc kubenswrapper[4813]: I0320 16:00:24.579619 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:28 crc kubenswrapper[4813]: I0320 16:00:28.096340 4813 generic.go:334] "Generic (PLEG): container finished" podID="8edd195e-2956-40af-862d-6ebf3e48660d" containerID="961067924eefb9dd1fdaf9b2e74736bebac11be188943e28b99c9f11f67e03b7" exitCode=0 Mar 20 16:00:28 crc kubenswrapper[4813]: I0320 16:00:28.096441 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" event={"ID":"8edd195e-2956-40af-862d-6ebf3e48660d","Type":"ContainerDied","Data":"961067924eefb9dd1fdaf9b2e74736bebac11be188943e28b99c9f11f67e03b7"} Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.107602 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerStarted","Data":"6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048"} Mar 20 16:00:29 crc 
kubenswrapper[4813]: I0320 16:00:29.521988 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.696738 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-config-data\") pod \"8edd195e-2956-40af-862d-6ebf3e48660d\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.697022 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tl4p\" (UniqueName: \"kubernetes.io/projected/8edd195e-2956-40af-862d-6ebf3e48660d-kube-api-access-6tl4p\") pod \"8edd195e-2956-40af-862d-6ebf3e48660d\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.697059 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-scripts\") pod \"8edd195e-2956-40af-862d-6ebf3e48660d\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.697095 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-fernet-keys\") pod \"8edd195e-2956-40af-862d-6ebf3e48660d\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.697153 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-credential-keys\") pod \"8edd195e-2956-40af-862d-6ebf3e48660d\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.697203 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-combined-ca-bundle\") pod \"8edd195e-2956-40af-862d-6ebf3e48660d\" (UID: \"8edd195e-2956-40af-862d-6ebf3e48660d\") " Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.703745 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8edd195e-2956-40af-862d-6ebf3e48660d" (UID: "8edd195e-2956-40af-862d-6ebf3e48660d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.704066 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-scripts" (OuterVolumeSpecName: "scripts") pod "8edd195e-2956-40af-862d-6ebf3e48660d" (UID: "8edd195e-2956-40af-862d-6ebf3e48660d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.704211 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8edd195e-2956-40af-862d-6ebf3e48660d" (UID: "8edd195e-2956-40af-862d-6ebf3e48660d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.706594 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8edd195e-2956-40af-862d-6ebf3e48660d-kube-api-access-6tl4p" (OuterVolumeSpecName: "kube-api-access-6tl4p") pod "8edd195e-2956-40af-862d-6ebf3e48660d" (UID: "8edd195e-2956-40af-862d-6ebf3e48660d"). InnerVolumeSpecName "kube-api-access-6tl4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.723875 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8edd195e-2956-40af-862d-6ebf3e48660d" (UID: "8edd195e-2956-40af-862d-6ebf3e48660d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.726002 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-config-data" (OuterVolumeSpecName: "config-data") pod "8edd195e-2956-40af-862d-6ebf3e48660d" (UID: "8edd195e-2956-40af-862d-6ebf3e48660d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.799061 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.799095 4813 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-credential-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.799106 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.799115 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.799122 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tl4p\" (UniqueName: \"kubernetes.io/projected/8edd195e-2956-40af-862d-6ebf3e48660d-kube-api-access-6tl4p\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:29 crc kubenswrapper[4813]: I0320 16:00:29.799132 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8edd195e-2956-40af-862d-6ebf3e48660d-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.116197 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" event={"ID":"8edd195e-2956-40af-862d-6ebf3e48660d","Type":"ContainerDied","Data":"6d447734c50261b730fb5f9ae641f0c0c0af85ad022dd5afcaf30f2d88310cfa"} Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.116512 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d447734c50261b730fb5f9ae641f0c0c0af85ad022dd5afcaf30f2d88310cfa" Mar 20 16:00:30 crc 
kubenswrapper[4813]: I0320 16:00:30.116265 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-4nm2s" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.175223 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-4nm2s"] Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.182928 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-4nm2s"] Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.285121 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-6l7q5"] Mar 20 16:00:30 crc kubenswrapper[4813]: E0320 16:00:30.286014 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8edd195e-2956-40af-862d-6ebf3e48660d" containerName="keystone-bootstrap" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.286163 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8edd195e-2956-40af-862d-6ebf3e48660d" containerName="keystone-bootstrap" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.286594 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8edd195e-2956-40af-862d-6ebf3e48660d" containerName="keystone-bootstrap" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.287647 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.293172 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-6l7q5"] Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.325871 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-combined-ca-bundle\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.325983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-fernet-keys\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326015 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-scripts\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326182 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-config-data\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326199 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326225 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwg87\" (UniqueName: \"kubernetes.io/projected/cee890fd-7087-4331-958a-6098985056be-kube-api-access-zwg87\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326278 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-credential-keys\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326404 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-zpjf7" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326545 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"osp-secret" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326607 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.326790 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.428284 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-fernet-keys\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.428335 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-scripts\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.428415 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-config-data\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.428437 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwg87\" (UniqueName: \"kubernetes.io/projected/cee890fd-7087-4331-958a-6098985056be-kube-api-access-zwg87\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.428456 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-credential-keys\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.428510 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-combined-ca-bundle\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.433132 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-scripts\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.433169 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-credential-keys\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.433244 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-combined-ca-bundle\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.435425 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-fernet-keys\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.444209 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-config-data\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.447313 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwg87\" (UniqueName: \"kubernetes.io/projected/cee890fd-7087-4331-958a-6098985056be-kube-api-access-zwg87\") pod \"keystone-bootstrap-6l7q5\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:30 crc kubenswrapper[4813]: I0320 16:00:30.652957 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:31 crc kubenswrapper[4813]: I0320 16:00:31.133987 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerStarted","Data":"9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f"} Mar 20 16:00:31 crc kubenswrapper[4813]: I0320 16:00:31.180720 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-6l7q5"] Mar 20 16:00:31 crc kubenswrapper[4813]: I0320 16:00:31.280030 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8edd195e-2956-40af-862d-6ebf3e48660d" path="/var/lib/kubelet/pods/8edd195e-2956-40af-862d-6ebf3e48660d/volumes" Mar 20 16:00:32 crc kubenswrapper[4813]: I0320 16:00:32.167332 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" event={"ID":"cee890fd-7087-4331-958a-6098985056be","Type":"ContainerStarted","Data":"b06728261a4222fcffa73e2c79d74b7107e355a6c398f3cdf7e573a5436a63f2"} Mar 20 16:00:32 crc kubenswrapper[4813]: I0320 16:00:32.167737 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" event={"ID":"cee890fd-7087-4331-958a-6098985056be","Type":"ContainerStarted","Data":"88b24f53099adfe357ba707a2fe2588d4722ee4dbbea3acd71489269dbe5b232"} Mar 20 16:00:32 crc kubenswrapper[4813]: I0320 16:00:32.198750 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" podStartSLOduration=2.198731945 podStartE2EDuration="2.198731945s" podCreationTimestamp="2026-03-20 16:00:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:00:32.19632764 +0000 UTC m=+1361.619030491" watchObservedRunningTime="2026-03-20 16:00:32.198731945 +0000 UTC m=+1361.621434786" Mar 20 16:00:33 crc kubenswrapper[4813]: I0320 16:00:33.842856 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:00:33 crc kubenswrapper[4813]: I0320 16:00:33.843194 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:00:33 crc kubenswrapper[4813]: I0320 16:00:33.843236 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:00:33 crc kubenswrapper[4813]: I0320 16:00:33.843920 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"957002a8822874ec45a5cbe2ca3717cafc8492693f474f6fa7fd364b2cfa8d50"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:00:33 crc kubenswrapper[4813]: I0320 16:00:33.844290 4813 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://957002a8822874ec45a5cbe2ca3717cafc8492693f474f6fa7fd364b2cfa8d50" gracePeriod=600 Mar 20 16:00:34 crc kubenswrapper[4813]: I0320 16:00:34.188136 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="957002a8822874ec45a5cbe2ca3717cafc8492693f474f6fa7fd364b2cfa8d50" exitCode=0 Mar 20 16:00:34 crc kubenswrapper[4813]: I0320 16:00:34.188192 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"957002a8822874ec45a5cbe2ca3717cafc8492693f474f6fa7fd364b2cfa8d50"} Mar 20 16:00:34 crc kubenswrapper[4813]: I0320 16:00:34.188235 4813 scope.go:117] "RemoveContainer" containerID="58e6bc2a6d9001d676e2ad6e8e29a8d0759512d8590133534bf74186af5900d4" Mar 20 16:00:35 crc kubenswrapper[4813]: I0320 16:00:35.198751 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"238a56263adb7631a4a6e1bdb38d22562989800e25b84edc88b747390df1d3e7"} Mar 20 16:00:35 crc kubenswrapper[4813]: I0320 16:00:35.201127 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerStarted","Data":"5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9"} Mar 20 16:00:35 crc kubenswrapper[4813]: I0320 16:00:35.202690 4813 generic.go:334] "Generic (PLEG): container finished" podID="cee890fd-7087-4331-958a-6098985056be" containerID="b06728261a4222fcffa73e2c79d74b7107e355a6c398f3cdf7e573a5436a63f2" exitCode=0 Mar 20 16:00:35 crc kubenswrapper[4813]: I0320 16:00:35.202734 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" event={"ID":"cee890fd-7087-4331-958a-6098985056be","Type":"ContainerDied","Data":"b06728261a4222fcffa73e2c79d74b7107e355a6c398f3cdf7e573a5436a63f2"} Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.528087 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.643292 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-combined-ca-bundle\") pod \"cee890fd-7087-4331-958a-6098985056be\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.643380 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-fernet-keys\") pod \"cee890fd-7087-4331-958a-6098985056be\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.643470 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwg87\" (UniqueName: \"kubernetes.io/projected/cee890fd-7087-4331-958a-6098985056be-kube-api-access-zwg87\") pod \"cee890fd-7087-4331-958a-6098985056be\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.643517 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-config-data\") pod \"cee890fd-7087-4331-958a-6098985056be\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.643593 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-scripts\") pod \"cee890fd-7087-4331-958a-6098985056be\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.643627 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-credential-keys\") pod \"cee890fd-7087-4331-958a-6098985056be\" (UID: \"cee890fd-7087-4331-958a-6098985056be\") " Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.648933 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-scripts" (OuterVolumeSpecName: "scripts") pod "cee890fd-7087-4331-958a-6098985056be" (UID: "cee890fd-7087-4331-958a-6098985056be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.649264 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cee890fd-7087-4331-958a-6098985056be-kube-api-access-zwg87" (OuterVolumeSpecName: "kube-api-access-zwg87") pod "cee890fd-7087-4331-958a-6098985056be" (UID: "cee890fd-7087-4331-958a-6098985056be"). InnerVolumeSpecName "kube-api-access-zwg87". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.649406 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cee890fd-7087-4331-958a-6098985056be" (UID: "cee890fd-7087-4331-958a-6098985056be"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.650101 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cee890fd-7087-4331-958a-6098985056be" (UID: "cee890fd-7087-4331-958a-6098985056be"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.666076 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-config-data" (OuterVolumeSpecName: "config-data") pod "cee890fd-7087-4331-958a-6098985056be" (UID: "cee890fd-7087-4331-958a-6098985056be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.669650 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cee890fd-7087-4331-958a-6098985056be" (UID: "cee890fd-7087-4331-958a-6098985056be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.745161 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.745199 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.745208 4813 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-credential-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.745220 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.745229 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cee890fd-7087-4331-958a-6098985056be-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:36 crc kubenswrapper[4813]: I0320 16:00:36.745238 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwg87\" (UniqueName: \"kubernetes.io/projected/cee890fd-7087-4331-958a-6098985056be-kube-api-access-zwg87\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.226455 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" event={"ID":"cee890fd-7087-4331-958a-6098985056be","Type":"ContainerDied","Data":"88b24f53099adfe357ba707a2fe2588d4722ee4dbbea3acd71489269dbe5b232"} Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.226941 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88b24f53099adfe357ba707a2fe2588d4722ee4dbbea3acd71489269dbe5b232" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.226556 4813 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-6l7q5" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.342661 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-8469ccf466-qnct6"] Mar 20 16:00:37 crc kubenswrapper[4813]: E0320 16:00:37.343147 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cee890fd-7087-4331-958a-6098985056be" containerName="keystone-bootstrap" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.343162 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="cee890fd-7087-4331-958a-6098985056be" containerName="keystone-bootstrap" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.343356 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="cee890fd-7087-4331-958a-6098985056be" containerName="keystone-bootstrap" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.344037 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.347432 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353394 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-fernet-keys\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353472 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnhmf\" (UniqueName: \"kubernetes.io/projected/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-kube-api-access-nnhmf\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353535 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-credential-keys\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353575 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353600 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-public-tls-certs\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-scripts\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: 
I0320 16:00:37.353660 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-config-data\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353792 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-combined-ca-bundle\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353814 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-internal-tls-certs\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353836 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-keystone-public-svc" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.353867 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-zpjf7" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.356359 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.356625 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-keystone-internal-svc" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.368749 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-8469ccf466-qnct6"] Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-credential-keys\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455735 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-public-tls-certs\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455769 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-scripts\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455792 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-config-data\") pod \"keystone-8469ccf466-qnct6\" (UID: 
\"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-combined-ca-bundle\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455879 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-internal-tls-certs\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455908 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-fernet-keys\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.455934 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnhmf\" (UniqueName: \"kubernetes.io/projected/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-kube-api-access-nnhmf\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.461667 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-scripts\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.461755 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-public-tls-certs\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.462118 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-internal-tls-certs\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.462228 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-combined-ca-bundle\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.462619 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-fernet-keys\") pod \"keystone-8469ccf466-qnct6\" (UID: 
\"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.462884 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-credential-keys\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.463527 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-config-data\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.480057 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnhmf\" (UniqueName: \"kubernetes.io/projected/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-kube-api-access-nnhmf\") pod \"keystone-8469ccf466-qnct6\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:37 crc kubenswrapper[4813]: I0320 16:00:37.683857 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:38 crc kubenswrapper[4813]: I0320 16:00:38.194435 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-8469ccf466-qnct6"] Mar 20 16:00:38 crc kubenswrapper[4813]: W0320 16:00:38.214894 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb6d5669_5b48_48ab_8f34_5cd0ecb91d93.slice/crio-d2cf3e7c7c6e7f0a17ad573b7b1979a2fb2f5d8a848d6c300172842c24c7198c WatchSource:0}: Error finding container d2cf3e7c7c6e7f0a17ad573b7b1979a2fb2f5d8a848d6c300172842c24c7198c: Status 404 returned error can't find the container with id d2cf3e7c7c6e7f0a17ad573b7b1979a2fb2f5d8a848d6c300172842c24c7198c Mar 20 16:00:38 crc kubenswrapper[4813]: I0320 16:00:38.240291 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" event={"ID":"db6d5669-5b48-48ab-8f34-5cd0ecb91d93","Type":"ContainerStarted","Data":"d2cf3e7c7c6e7f0a17ad573b7b1979a2fb2f5d8a848d6c300172842c24c7198c"} Mar 20 16:00:39 crc kubenswrapper[4813]: I0320 16:00:39.253007 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" event={"ID":"db6d5669-5b48-48ab-8f34-5cd0ecb91d93","Type":"ContainerStarted","Data":"500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552"} Mar 20 16:00:39 crc kubenswrapper[4813]: I0320 16:00:39.254331 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:00:39 crc kubenswrapper[4813]: I0320 16:00:39.277775 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" podStartSLOduration=2.277730708 podStartE2EDuration="2.277730708s" podCreationTimestamp="2026-03-20 16:00:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:00:39.27264671 +0000 UTC m=+1368.695349551" watchObservedRunningTime="2026-03-20 16:00:39.277730708 
+0000 UTC m=+1368.700433559" Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.331955 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerStarted","Data":"35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f"} Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.332703 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.332204 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="proxy-httpd" containerID="cri-o://35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f" gracePeriod=30 Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.332111 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-central-agent" containerID="cri-o://6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048" gracePeriod=30 Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.332252 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="sg-core" containerID="cri-o://5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9" gracePeriod=30 Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.332272 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-notification-agent" containerID="cri-o://9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f" gracePeriod=30 Mar 20 16:00:47 crc kubenswrapper[4813]: I0320 16:00:47.380829 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.7916313480000001 podStartE2EDuration="25.380797011s" podCreationTimestamp="2026-03-20 16:00:22 +0000 UTC" firstStartedPulling="2026-03-20 16:00:23.276088333 +0000 UTC m=+1352.698791174" lastFinishedPulling="2026-03-20 16:00:46.865253986 +0000 UTC m=+1376.287956837" observedRunningTime="2026-03-20 16:00:47.373546165 +0000 UTC m=+1376.796249006" watchObservedRunningTime="2026-03-20 16:00:47.380797011 +0000 UTC m=+1376.803499912" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.342777 4813 generic.go:334] "Generic (PLEG): container finished" podID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerID="35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f" exitCode=0 Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.342813 4813 generic.go:334] "Generic (PLEG): container finished" podID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerID="5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9" exitCode=2 Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.342822 4813 generic.go:334] "Generic (PLEG): container finished" podID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerID="6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048" exitCode=0 Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.342841 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerDied","Data":"35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f"} Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.342869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerDied","Data":"5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9"} Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.342881 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerDied","Data":"6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048"} Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.788230 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843016 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-scripts\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843132 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rzkp\" (UniqueName: \"kubernetes.io/projected/ff7919ef-56e4-4690-a1d2-105f6bf182b6-kube-api-access-4rzkp\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843165 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-config-data\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843211 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-sg-core-conf-yaml\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843259 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-combined-ca-bundle\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843323 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-log-httpd\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.843368 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-run-httpd\") pod \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\" (UID: \"ff7919ef-56e4-4690-a1d2-105f6bf182b6\") " Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.844307 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.849441 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.849775 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-scripts" (OuterVolumeSpecName: "scripts") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.853668 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff7919ef-56e4-4690-a1d2-105f6bf182b6-kube-api-access-4rzkp" (OuterVolumeSpecName: "kube-api-access-4rzkp") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "kube-api-access-4rzkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.865398 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.898977 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.931663 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-config-data" (OuterVolumeSpecName: "config-data") pod "ff7919ef-56e4-4690-a1d2-105f6bf182b6" (UID: "ff7919ef-56e4-4690-a1d2-105f6bf182b6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945279 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945313 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945323 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945335 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff7919ef-56e4-4690-a1d2-105f6bf182b6-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945344 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945352 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rzkp\" (UniqueName: \"kubernetes.io/projected/ff7919ef-56e4-4690-a1d2-105f6bf182b6-kube-api-access-4rzkp\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:48 crc kubenswrapper[4813]: I0320 16:00:48.945362 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff7919ef-56e4-4690-a1d2-105f6bf182b6-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.365231 4813 generic.go:334] "Generic (PLEG): container finished" podID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerID="9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f" exitCode=0 Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.365288 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerDied","Data":"9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f"} Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.365316 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ff7919ef-56e4-4690-a1d2-105f6bf182b6","Type":"ContainerDied","Data":"5713f0624afccea51fc921014bdf3e16e5e4faa37de39650f519ecc54012fcfb"} Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.365332 4813 scope.go:117] "RemoveContainer" containerID="35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.365521 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.394841 4813 scope.go:117] "RemoveContainer" containerID="5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.408646 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.419499 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.425520 4813 scope.go:117] "RemoveContainer" containerID="9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.428513 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.428884 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-notification-agent" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.428906 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-notification-agent" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.428925 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="proxy-httpd" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.428934 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="proxy-httpd" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.428949 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="sg-core" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.428956 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="sg-core" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.428981 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-central-agent" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.428988 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-central-agent" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.429519 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="sg-core" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.429544 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-central-agent" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.429556 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="proxy-httpd" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.429582 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" containerName="ceilometer-notification-agent" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.431210 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.433884 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.434399 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.449743 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453125 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453195 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-run-httpd\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453220 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fmfk\" (UniqueName: \"kubernetes.io/projected/a49847fd-733a-4632-ab30-0e192394600f-kube-api-access-2fmfk\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453237 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-scripts\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453280 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-log-httpd\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453307 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.453326 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-config-data\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.500900 4813 scope.go:117] "RemoveContainer" containerID="6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.521403 4813 scope.go:117] "RemoveContainer" 
containerID="35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.523354 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f\": container with ID starting with 35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f not found: ID does not exist" containerID="35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.523394 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f"} err="failed to get container status \"35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f\": rpc error: code = NotFound desc = could not find container \"35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f\": container with ID starting with 35b3bed04238b20ee580c0dea605ad7e1cd88c91bf5974bd429cb7b2368f031f not found: ID does not exist" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.523418 4813 scope.go:117] "RemoveContainer" containerID="5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.526764 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9\": container with ID starting with 5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9 not found: ID does not exist" containerID="5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.526793 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9"} err="failed to get container status \"5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9\": rpc error: code = NotFound desc = could not find container \"5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9\": container with ID starting with 5c18e15f1ec5195e7de759d290a6c30ebcc60671681bbd74f1e8ab6013d41cf9 not found: ID does not exist" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.526811 4813 scope.go:117] "RemoveContainer" containerID="9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.528198 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f\": container with ID starting with 9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f not found: ID does not exist" containerID="9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.528220 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f"} err="failed to get container status \"9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f\": rpc error: code = NotFound desc = could not find container \"9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f\": container with ID starting with 
9eb5a409063047312062bac648528df803d825cc6865310a78d5026501d5cd7f not found: ID does not exist" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.528234 4813 scope.go:117] "RemoveContainer" containerID="6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048" Mar 20 16:00:49 crc kubenswrapper[4813]: E0320 16:00:49.529847 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048\": container with ID starting with 6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048 not found: ID does not exist" containerID="6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.529870 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048"} err="failed to get container status \"6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048\": rpc error: code = NotFound desc = could not find container \"6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048\": container with ID starting with 6d000981329d84cbac686ff9396fd310bf1b146686cf4a27bd7406fd1b65b048 not found: ID does not exist" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.555800 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-log-httpd\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.555870 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.555896 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-config-data\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.555951 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.555999 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-run-httpd\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.556024 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fmfk\" (UniqueName: \"kubernetes.io/projected/a49847fd-733a-4632-ab30-0e192394600f-kube-api-access-2fmfk\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 
crc kubenswrapper[4813]: I0320 16:00:49.556045 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-scripts\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.558854 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-log-httpd\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.560896 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-run-httpd\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.567513 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-scripts\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.568056 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.572270 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.572411 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-config-data\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.595828 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fmfk\" (UniqueName: \"kubernetes.io/projected/a49847fd-733a-4632-ab30-0e192394600f-kube-api-access-2fmfk\") pod \"ceilometer-0\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:49 crc kubenswrapper[4813]: I0320 16:00:49.807548 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:50 crc kubenswrapper[4813]: I0320 16:00:50.243768 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:00:50 crc kubenswrapper[4813]: I0320 16:00:50.374071 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerStarted","Data":"aeeadc9b929c4bf8d54cfa3875435f5349612cfa21098b54c569c4fba748d386"} Mar 20 16:00:51 crc kubenswrapper[4813]: I0320 16:00:51.292627 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff7919ef-56e4-4690-a1d2-105f6bf182b6" path="/var/lib/kubelet/pods/ff7919ef-56e4-4690-a1d2-105f6bf182b6/volumes" Mar 20 16:00:51 crc kubenswrapper[4813]: I0320 16:00:51.388781 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerStarted","Data":"94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506"} Mar 20 16:00:53 crc kubenswrapper[4813]: I0320 16:00:53.409375 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerStarted","Data":"38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c"} Mar 20 16:00:53 crc kubenswrapper[4813]: I0320 16:00:53.409830 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerStarted","Data":"3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647"} Mar 20 16:00:56 crc kubenswrapper[4813]: I0320 16:00:56.447799 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerStarted","Data":"e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca"} Mar 20 16:00:56 crc kubenswrapper[4813]: I0320 16:00:56.448325 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:00:56 crc kubenswrapper[4813]: I0320 16:00:56.472785 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.448049168 podStartE2EDuration="7.472758293s" podCreationTimestamp="2026-03-20 16:00:49 +0000 UTC" firstStartedPulling="2026-03-20 16:00:50.240858267 +0000 UTC m=+1379.663561118" lastFinishedPulling="2026-03-20 16:00:55.265567362 +0000 UTC m=+1384.688270243" observedRunningTime="2026-03-20 16:00:56.468435206 +0000 UTC m=+1385.891138047" watchObservedRunningTime="2026-03-20 16:00:56.472758293 +0000 UTC m=+1385.895461134" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.173878 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-cron-29567041-94xtk"] Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.182686 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.196855 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-cron-29567041-94xtk"] Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.360520 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-fernet-keys\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.360607 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-combined-ca-bundle\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.360643 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqwrr\" (UniqueName: \"kubernetes.io/projected/d3a49c21-5df2-4013-b93e-8314741903e6-kube-api-access-fqwrr\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.360667 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-config-data\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.461817 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-combined-ca-bundle\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.461931 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqwrr\" (UniqueName: \"kubernetes.io/projected/d3a49c21-5df2-4013-b93e-8314741903e6-kube-api-access-fqwrr\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.461979 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-config-data\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.462171 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-fernet-keys\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 
16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.469354 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-fernet-keys\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.469615 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-combined-ca-bundle\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.472011 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-config-data\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.483161 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqwrr\" (UniqueName: \"kubernetes.io/projected/d3a49c21-5df2-4013-b93e-8314741903e6-kube-api-access-fqwrr\") pod \"keystone-cron-29567041-94xtk\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.504036 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:00 crc kubenswrapper[4813]: I0320 16:01:00.965735 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-cron-29567041-94xtk"] Mar 20 16:01:00 crc kubenswrapper[4813]: W0320 16:01:00.968289 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3a49c21_5df2_4013_b93e_8314741903e6.slice/crio-657a7ff328c0adacc8afb1a5d209b7dc24d556bcc55ee789a02412a7071635df WatchSource:0}: Error finding container 657a7ff328c0adacc8afb1a5d209b7dc24d556bcc55ee789a02412a7071635df: Status 404 returned error can't find the container with id 657a7ff328c0adacc8afb1a5d209b7dc24d556bcc55ee789a02412a7071635df Mar 20 16:01:01 crc kubenswrapper[4813]: I0320 16:01:01.495621 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" event={"ID":"d3a49c21-5df2-4013-b93e-8314741903e6","Type":"ContainerStarted","Data":"e1d48e9c1894e902abe03d6a98e039ac304212adad3ff2f7e2970ef9938d4130"} Mar 20 16:01:01 crc kubenswrapper[4813]: I0320 16:01:01.495970 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" event={"ID":"d3a49c21-5df2-4013-b93e-8314741903e6","Type":"ContainerStarted","Data":"657a7ff328c0adacc8afb1a5d209b7dc24d556bcc55ee789a02412a7071635df"} Mar 20 16:01:01 crc kubenswrapper[4813]: I0320 16:01:01.522799 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" podStartSLOduration=1.522771283 podStartE2EDuration="1.522771283s" podCreationTimestamp="2026-03-20 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 16:01:01.515670011 +0000 UTC m=+1390.938372862" watchObservedRunningTime="2026-03-20 16:01:01.522771283 +0000 UTC m=+1390.945474134" Mar 20 16:01:03 crc kubenswrapper[4813]: I0320 16:01:03.520325 4813 generic.go:334] "Generic (PLEG): container finished" podID="d3a49c21-5df2-4013-b93e-8314741903e6" containerID="e1d48e9c1894e902abe03d6a98e039ac304212adad3ff2f7e2970ef9938d4130" exitCode=0 Mar 20 16:01:03 crc kubenswrapper[4813]: I0320 16:01:03.520400 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" event={"ID":"d3a49c21-5df2-4013-b93e-8314741903e6","Type":"ContainerDied","Data":"e1d48e9c1894e902abe03d6a98e039ac304212adad3ff2f7e2970ef9938d4130"} Mar 20 16:01:04 crc kubenswrapper[4813]: I0320 16:01:04.909092 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.044545 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-combined-ca-bundle\") pod \"d3a49c21-5df2-4013-b93e-8314741903e6\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.044675 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqwrr\" (UniqueName: \"kubernetes.io/projected/d3a49c21-5df2-4013-b93e-8314741903e6-kube-api-access-fqwrr\") pod \"d3a49c21-5df2-4013-b93e-8314741903e6\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.044725 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-fernet-keys\") pod \"d3a49c21-5df2-4013-b93e-8314741903e6\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.044785 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-config-data\") pod \"d3a49c21-5df2-4013-b93e-8314741903e6\" (UID: \"d3a49c21-5df2-4013-b93e-8314741903e6\") " Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.050190 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d3a49c21-5df2-4013-b93e-8314741903e6" (UID: "d3a49c21-5df2-4013-b93e-8314741903e6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.050853 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3a49c21-5df2-4013-b93e-8314741903e6-kube-api-access-fqwrr" (OuterVolumeSpecName: "kube-api-access-fqwrr") pod "d3a49c21-5df2-4013-b93e-8314741903e6" (UID: "d3a49c21-5df2-4013-b93e-8314741903e6"). InnerVolumeSpecName "kube-api-access-fqwrr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.086917 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3a49c21-5df2-4013-b93e-8314741903e6" (UID: "d3a49c21-5df2-4013-b93e-8314741903e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.098031 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-config-data" (OuterVolumeSpecName: "config-data") pod "d3a49c21-5df2-4013-b93e-8314741903e6" (UID: "d3a49c21-5df2-4013-b93e-8314741903e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.152677 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.152716 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqwrr\" (UniqueName: \"kubernetes.io/projected/d3a49c21-5df2-4013-b93e-8314741903e6-kube-api-access-fqwrr\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.152729 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.152737 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a49c21-5df2-4013-b93e-8314741903e6-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.544863 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" event={"ID":"d3a49c21-5df2-4013-b93e-8314741903e6","Type":"ContainerDied","Data":"657a7ff328c0adacc8afb1a5d209b7dc24d556bcc55ee789a02412a7071635df"} Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.545195 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="657a7ff328c0adacc8afb1a5d209b7dc24d556bcc55ee789a02412a7071635df" Mar 20 16:01:05 crc kubenswrapper[4813]: I0320 16:01:05.544947 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-cron-29567041-94xtk" Mar 20 16:01:09 crc kubenswrapper[4813]: I0320 16:01:09.236430 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.074672 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/openstackclient"] Mar 20 16:01:14 crc kubenswrapper[4813]: E0320 16:01:14.075520 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3a49c21-5df2-4013-b93e-8314741903e6" containerName="keystone-cron" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.075538 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3a49c21-5df2-4013-b93e-8314741903e6" containerName="keystone-cron" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.075771 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3a49c21-5df2-4013-b93e-8314741903e6" containerName="keystone-cron" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.076456 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.079389 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openstack-config" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.079445 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"openstack-config-secret" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.084195 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"openstackclient-openstackclient-dockercfg-whrp5" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.086659 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstackclient"] Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.197319 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfcqt\" (UniqueName: \"kubernetes.io/projected/8cf25919-005b-4b34-98eb-28cacad075b4-kube-api-access-sfcqt\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.197647 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8cf25919-005b-4b34-98eb-28cacad075b4-openstack-config\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.197782 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8cf25919-005b-4b34-98eb-28cacad075b4-openstack-config-secret\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.197830 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf25919-005b-4b34-98eb-28cacad075b4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 
16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.299590 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfcqt\" (UniqueName: \"kubernetes.io/projected/8cf25919-005b-4b34-98eb-28cacad075b4-kube-api-access-sfcqt\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.299794 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8cf25919-005b-4b34-98eb-28cacad075b4-openstack-config\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.299847 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8cf25919-005b-4b34-98eb-28cacad075b4-openstack-config-secret\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.299881 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf25919-005b-4b34-98eb-28cacad075b4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.300974 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8cf25919-005b-4b34-98eb-28cacad075b4-openstack-config\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.309889 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf25919-005b-4b34-98eb-28cacad075b4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.323261 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8cf25919-005b-4b34-98eb-28cacad075b4-openstack-config-secret\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.323801 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfcqt\" (UniqueName: \"kubernetes.io/projected/8cf25919-005b-4b34-98eb-28cacad075b4-kube-api-access-sfcqt\") pod \"openstackclient\" (UID: \"8cf25919-005b-4b34-98eb-28cacad075b4\") " pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.406170 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/openstackclient" Mar 20 16:01:14 crc kubenswrapper[4813]: I0320 16:01:14.918227 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstackclient"] Mar 20 16:01:14 crc kubenswrapper[4813]: W0320 16:01:14.928175 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cf25919_005b_4b34_98eb_28cacad075b4.slice/crio-b49858b4e29a958440e463f933a88cd32ee5d565cfc8ae10c21e9ce4ac316cc9 WatchSource:0}: Error finding container b49858b4e29a958440e463f933a88cd32ee5d565cfc8ae10c21e9ce4ac316cc9: Status 404 returned error can't find the container with id b49858b4e29a958440e463f933a88cd32ee5d565cfc8ae10c21e9ce4ac316cc9 Mar 20 16:01:15 crc kubenswrapper[4813]: I0320 16:01:15.619844 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstackclient" event={"ID":"8cf25919-005b-4b34-98eb-28cacad075b4","Type":"ContainerStarted","Data":"b49858b4e29a958440e463f933a88cd32ee5d565cfc8ae10c21e9ce4ac316cc9"} Mar 20 16:01:15 crc kubenswrapper[4813]: I0320 16:01:15.871635 4813 scope.go:117] "RemoveContainer" containerID="550cb2b0850678e616e2e311c696d7d9b678adc681667f5955489f0caf52fa5c" Mar 20 16:01:19 crc kubenswrapper[4813]: I0320 16:01:19.904544 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:22 crc kubenswrapper[4813]: I0320 16:01:22.275318 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 16:01:22 crc kubenswrapper[4813]: I0320 16:01:22.275795 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/kube-state-metrics-0" podUID="7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" containerName="kube-state-metrics" containerID="cri-o://b61a1e59b9e371f7ede1a7023b2ac8fcf40bbd9cb139d43ee110280b0562e550" gracePeriod=30 Mar 20 16:01:22 crc kubenswrapper[4813]: I0320 16:01:22.688713 4813 generic.go:334] "Generic (PLEG): container finished" podID="7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" containerID="b61a1e59b9e371f7ede1a7023b2ac8fcf40bbd9cb139d43ee110280b0562e550" exitCode=2 Mar 20 16:01:22 crc kubenswrapper[4813]: I0320 16:01:22.688814 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca","Type":"ContainerDied","Data":"b61a1e59b9e371f7ede1a7023b2ac8fcf40bbd9cb139d43ee110280b0562e550"} Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.221939 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.222205 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-central-agent" containerID="cri-o://94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506" gracePeriod=30 Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.222330 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="sg-core" containerID="cri-o://38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c" gracePeriod=30 Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.222371 4813 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="proxy-httpd" containerID="cri-o://e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca" gracePeriod=30 Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.222339 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-notification-agent" containerID="cri-o://3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647" gracePeriod=30 Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.699007 4813 generic.go:334] "Generic (PLEG): container finished" podID="a49847fd-733a-4632-ab30-0e192394600f" containerID="e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca" exitCode=0 Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.699041 4813 generic.go:334] "Generic (PLEG): container finished" podID="a49847fd-733a-4632-ab30-0e192394600f" containerID="38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c" exitCode=2 Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.699057 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerDied","Data":"e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca"} Mar 20 16:01:23 crc kubenswrapper[4813]: I0320 16:01:23.699081 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerDied","Data":"38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c"} Mar 20 16:01:24 crc kubenswrapper[4813]: I0320 16:01:24.709321 4813 generic.go:334] "Generic (PLEG): container finished" podID="a49847fd-733a-4632-ab30-0e192394600f" containerID="94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506" exitCode=0 Mar 20 16:01:24 crc kubenswrapper[4813]: I0320 16:01:24.709417 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerDied","Data":"94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506"} Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.086102 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.193815 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdm6n\" (UniqueName: \"kubernetes.io/projected/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca-kube-api-access-jdm6n\") pod \"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca\" (UID: \"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.200546 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca-kube-api-access-jdm6n" (OuterVolumeSpecName: "kube-api-access-jdm6n") pod "7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" (UID: "7ff4d7e5-cf19-41e9-b489-3361f7eb2fca"). InnerVolumeSpecName "kube-api-access-jdm6n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.295840 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdm6n\" (UniqueName: \"kubernetes.io/projected/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca-kube-api-access-jdm6n\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.496833 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599191 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-run-httpd\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599434 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-sg-core-conf-yaml\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599580 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599597 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-config-data\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599736 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-scripts\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599804 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-combined-ca-bundle\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599835 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fmfk\" (UniqueName: \"kubernetes.io/projected/a49847fd-733a-4632-ab30-0e192394600f-kube-api-access-2fmfk\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.599948 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-log-httpd\") pod \"a49847fd-733a-4632-ab30-0e192394600f\" (UID: \"a49847fd-733a-4632-ab30-0e192394600f\") " Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.600546 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.600633 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.603992 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a49847fd-733a-4632-ab30-0e192394600f-kube-api-access-2fmfk" (OuterVolumeSpecName: "kube-api-access-2fmfk") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "kube-api-access-2fmfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.606683 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-scripts" (OuterVolumeSpecName: "scripts") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.634607 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.659860 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.669033 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-config-data" (OuterVolumeSpecName: "config-data") pod "a49847fd-733a-4632-ab30-0e192394600f" (UID: "a49847fd-733a-4632-ab30-0e192394600f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.702633 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a49847fd-733a-4632-ab30-0e192394600f-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.702673 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.702691 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.702704 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.702715 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a49847fd-733a-4632-ab30-0e192394600f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.702728 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fmfk\" (UniqueName: \"kubernetes.io/projected/a49847fd-733a-4632-ab30-0e192394600f-kube-api-access-2fmfk\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.733286 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"7ff4d7e5-cf19-41e9-b489-3361f7eb2fca","Type":"ContainerDied","Data":"4fcffeb23877fd7f6e560152a370a94b2df681834c220843f8ea9ac5c4761172"} Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.733363 4813 scope.go:117] "RemoveContainer" containerID="b61a1e59b9e371f7ede1a7023b2ac8fcf40bbd9cb139d43ee110280b0562e550" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.733361 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.739385 4813 generic.go:334] "Generic (PLEG): container finished" podID="a49847fd-733a-4632-ab30-0e192394600f" containerID="3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647" exitCode=0 Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.739462 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.739475 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerDied","Data":"3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647"} Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.739567 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a49847fd-733a-4632-ab30-0e192394600f","Type":"ContainerDied","Data":"aeeadc9b929c4bf8d54cfa3875435f5349612cfa21098b54c569c4fba748d386"} Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.743131 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstackclient" event={"ID":"8cf25919-005b-4b34-98eb-28cacad075b4","Type":"ContainerStarted","Data":"5e62e4b7b957047be68625c9e772e8b866067cb73a1d2a592a7b930af8f26c1e"} Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.768799 4813 scope.go:117] "RemoveContainer" containerID="e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.772823 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/openstackclient" podStartSLOduration=1.5094162290000002 podStartE2EDuration="12.772802443s" podCreationTimestamp="2026-03-20 16:01:14 +0000 UTC" firstStartedPulling="2026-03-20 16:01:14.930362407 +0000 UTC m=+1404.353065248" lastFinishedPulling="2026-03-20 16:01:26.193748621 +0000 UTC m=+1415.616451462" observedRunningTime="2026-03-20 16:01:26.762352181 +0000 UTC m=+1416.185055022" watchObservedRunningTime="2026-03-20 16:01:26.772802443 +0000 UTC m=+1416.195505284" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.797621 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.813958 4813 scope.go:117] "RemoveContainer" containerID="38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.814086 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.827187 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.843917 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.850524 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.851191 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="proxy-httpd" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851235 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="proxy-httpd" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.851264 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-central-agent" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851272 4813 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-central-agent" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.851288 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="sg-core" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851318 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="sg-core" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.851350 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-notification-agent" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851358 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-notification-agent" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.851366 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" containerName="kube-state-metrics" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851396 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" containerName="kube-state-metrics" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851692 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="sg-core" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851740 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-notification-agent" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851761 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" containerName="kube-state-metrics" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851778 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="ceilometer-central-agent" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.851846 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a49847fd-733a-4632-ab30-0e192394600f" containerName="proxy-httpd" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.852986 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.855209 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"kube-state-metrics-tls-config" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.855614 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"telemetry-ceilometer-dockercfg-q4p6n" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.855877 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-kube-state-metrics-svc" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.860268 4813 scope.go:117] "RemoveContainer" containerID="3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.868152 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.870710 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.873617 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.873635 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.874090 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.889794 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.897079 4813 scope.go:117] "RemoveContainer" containerID="94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.900266 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.920469 4813 scope.go:117] "RemoveContainer" containerID="e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.920917 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca\": container with ID starting with e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca not found: ID does not exist" containerID="e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.920956 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca"} err="failed to get container status \"e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca\": rpc error: code = NotFound desc = could not find container \"e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca\": container with ID starting with e47a0819e22ca52ad0b807c5cb098b24d158b82a54c30dc441c8797c2bb207ca not found: ID does not exist" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.920999 4813 scope.go:117] "RemoveContainer" containerID="38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.921333 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c\": container with ID starting with 38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c not found: ID does not exist" containerID="38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.921370 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c"} err="failed to get container status \"38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c\": rpc error: code = NotFound desc = could not find container \"38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c\": container with ID starting with 
38a986a66399787316534589c114efc4e85f1cf31cbf166f035a17cfbb10a95c not found: ID does not exist" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.921397 4813 scope.go:117] "RemoveContainer" containerID="3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.921983 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647\": container with ID starting with 3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647 not found: ID does not exist" containerID="3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.922007 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647"} err="failed to get container status \"3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647\": rpc error: code = NotFound desc = could not find container \"3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647\": container with ID starting with 3773a5a9f70e40aebddfbd85abcb3649601a5fd7dd4c608def56288901a93647 not found: ID does not exist" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.922024 4813 scope.go:117] "RemoveContainer" containerID="94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506" Mar 20 16:01:26 crc kubenswrapper[4813]: E0320 16:01:26.922228 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506\": container with ID starting with 94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506 not found: ID does not exist" containerID="94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506" Mar 20 16:01:26 crc kubenswrapper[4813]: I0320 16:01:26.922253 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506"} err="failed to get container status \"94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506\": rpc error: code = NotFound desc = could not find container \"94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506\": container with ID starting with 94fa07ff9c49001d7b9baf533b43a702da106d02c0ed902c4cf7309569c95506 not found: ID does not exist" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.007878 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008167 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-scripts\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008251 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgm4b\" (UniqueName: 
\"kubernetes.io/projected/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-api-access-lgm4b\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008348 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008419 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008514 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008628 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-config-data\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008701 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008797 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gdwh\" (UniqueName: \"kubernetes.io/projected/2601574a-36c2-45b2-9369-b3804694a6e5-kube-api-access-2gdwh\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008863 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.008972 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-log-httpd\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.009041 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-run-httpd\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.110871 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-log-httpd\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.110932 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-run-httpd\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.110973 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.110993 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-scripts\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111019 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgm4b\" (UniqueName: \"kubernetes.io/projected/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-api-access-lgm4b\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111049 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111071 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111090 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111123 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-config-data\") 
pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111146 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111176 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gdwh\" (UniqueName: \"kubernetes.io/projected/2601574a-36c2-45b2-9369-b3804694a6e5-kube-api-access-2gdwh\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111195 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.111660 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-log-httpd\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.112857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-run-httpd\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.115900 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.116466 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.116701 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-scripts\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.117223 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 
16:01:27.117730 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-config-data\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.118664 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5449776-80e0-4686-bd0b-2fd27d88ed52-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.119574 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.120982 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.146032 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgm4b\" (UniqueName: \"kubernetes.io/projected/c5449776-80e0-4686-bd0b-2fd27d88ed52-kube-api-access-lgm4b\") pod \"kube-state-metrics-0\" (UID: \"c5449776-80e0-4686-bd0b-2fd27d88ed52\") " pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.152814 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gdwh\" (UniqueName: \"kubernetes.io/projected/2601574a-36c2-45b2-9369-b3804694a6e5-kube-api-access-2gdwh\") pod \"ceilometer-0\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.169446 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.189675 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.293506 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ff4d7e5-cf19-41e9-b489-3361f7eb2fca" path="/var/lib/kubelet/pods/7ff4d7e5-cf19-41e9-b489-3361f7eb2fca/volumes" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.294249 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a49847fd-733a-4632-ab30-0e192394600f" path="/var/lib/kubelet/pods/a49847fd-733a-4632-ab30-0e192394600f/volumes" Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.719909 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Mar 20 16:01:27 crc kubenswrapper[4813]: W0320 16:01:27.733191 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5449776_80e0_4686_bd0b_2fd27d88ed52.slice/crio-70931682e71defb683152596f20ecec511b2683fef57bbdacc01392b6e6af7a4 WatchSource:0}: Error finding container 70931682e71defb683152596f20ecec511b2683fef57bbdacc01392b6e6af7a4: Status 404 returned error can't find the container with id 70931682e71defb683152596f20ecec511b2683fef57bbdacc01392b6e6af7a4 Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.751811 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"c5449776-80e0-4686-bd0b-2fd27d88ed52","Type":"ContainerStarted","Data":"70931682e71defb683152596f20ecec511b2683fef57bbdacc01392b6e6af7a4"} Mar 20 16:01:27 crc kubenswrapper[4813]: I0320 16:01:27.810653 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:01:27 crc kubenswrapper[4813]: W0320 16:01:27.815789 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2601574a_36c2_45b2_9369_b3804694a6e5.slice/crio-a535084ef304e74fec3e9137806bde0e5e81c9be812c034a6f96771f2ed819f2 WatchSource:0}: Error finding container a535084ef304e74fec3e9137806bde0e5e81c9be812c034a6f96771f2ed819f2: Status 404 returned error can't find the container with id a535084ef304e74fec3e9137806bde0e5e81c9be812c034a6f96771f2ed819f2 Mar 20 16:01:28 crc kubenswrapper[4813]: I0320 16:01:28.759900 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"c5449776-80e0-4686-bd0b-2fd27d88ed52","Type":"ContainerStarted","Data":"8906c045a689bfc775bbc787a6b0e93e58bd55559a9743334bc83e2c43908c0c"} Mar 20 16:01:28 crc kubenswrapper[4813]: I0320 16:01:28.760158 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:28 crc kubenswrapper[4813]: I0320 16:01:28.762325 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerStarted","Data":"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab"} Mar 20 16:01:28 crc kubenswrapper[4813]: I0320 16:01:28.762358 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerStarted","Data":"a535084ef304e74fec3e9137806bde0e5e81c9be812c034a6f96771f2ed819f2"} Mar 20 16:01:28 crc kubenswrapper[4813]: I0320 16:01:28.782337 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="watcher-kuttl-default/kube-state-metrics-0" podStartSLOduration=2.291112313 podStartE2EDuration="2.78231834s" podCreationTimestamp="2026-03-20 16:01:26 +0000 UTC" firstStartedPulling="2026-03-20 16:01:27.735502685 +0000 UTC m=+1417.158205526" lastFinishedPulling="2026-03-20 16:01:28.226708712 +0000 UTC m=+1417.649411553" observedRunningTime="2026-03-20 16:01:28.776188194 +0000 UTC m=+1418.198891025" watchObservedRunningTime="2026-03-20 16:01:28.78231834 +0000 UTC m=+1418.205021171" Mar 20 16:01:29 crc kubenswrapper[4813]: I0320 16:01:29.771567 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerStarted","Data":"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42"} Mar 20 16:01:30 crc kubenswrapper[4813]: I0320 16:01:30.787181 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerStarted","Data":"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2"} Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.318053 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-qqmk2"] Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.319354 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.328950 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qqmk2"] Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.404391 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rps6h\" (UniqueName: \"kubernetes.io/projected/60365901-d14b-436f-828e-070910ceccba-kube-api-access-rps6h\") pod \"watcher-db-create-qqmk2\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.404460 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60365901-d14b-436f-828e-070910ceccba-operator-scripts\") pod \"watcher-db-create-qqmk2\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.423299 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-854b-account-create-update-hfhhk"] Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.424570 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.428931 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.434597 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-854b-account-create-update-hfhhk"] Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.505363 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60365901-d14b-436f-828e-070910ceccba-operator-scripts\") pod \"watcher-db-create-qqmk2\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.505799 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzgvg\" (UniqueName: \"kubernetes.io/projected/deea0819-58b8-4780-8e3c-49dd4185a4e3-kube-api-access-fzgvg\") pod \"watcher-854b-account-create-update-hfhhk\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.505842 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deea0819-58b8-4780-8e3c-49dd4185a4e3-operator-scripts\") pod \"watcher-854b-account-create-update-hfhhk\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.505949 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rps6h\" (UniqueName: \"kubernetes.io/projected/60365901-d14b-436f-828e-070910ceccba-kube-api-access-rps6h\") pod \"watcher-db-create-qqmk2\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.507231 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60365901-d14b-436f-828e-070910ceccba-operator-scripts\") pod \"watcher-db-create-qqmk2\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.542515 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rps6h\" (UniqueName: \"kubernetes.io/projected/60365901-d14b-436f-828e-070910ceccba-kube-api-access-rps6h\") pod \"watcher-db-create-qqmk2\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.607616 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzgvg\" (UniqueName: \"kubernetes.io/projected/deea0819-58b8-4780-8e3c-49dd4185a4e3-kube-api-access-fzgvg\") pod \"watcher-854b-account-create-update-hfhhk\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.607684 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/deea0819-58b8-4780-8e3c-49dd4185a4e3-operator-scripts\") pod \"watcher-854b-account-create-update-hfhhk\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.608373 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deea0819-58b8-4780-8e3c-49dd4185a4e3-operator-scripts\") pod \"watcher-854b-account-create-update-hfhhk\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.629549 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzgvg\" (UniqueName: \"kubernetes.io/projected/deea0819-58b8-4780-8e3c-49dd4185a4e3-kube-api-access-fzgvg\") pod \"watcher-854b-account-create-update-hfhhk\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.641188 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:31 crc kubenswrapper[4813]: I0320 16:01:31.743202 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.176201 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qqmk2"] Mar 20 16:01:32 crc kubenswrapper[4813]: W0320 16:01:32.181516 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60365901_d14b_436f_828e_070910ceccba.slice/crio-8f520388b98cc43d49b5d7def8267cb7012acb1f59e83879c138b6d37ad74b8c WatchSource:0}: Error finding container 8f520388b98cc43d49b5d7def8267cb7012acb1f59e83879c138b6d37ad74b8c: Status 404 returned error can't find the container with id 8f520388b98cc43d49b5d7def8267cb7012acb1f59e83879c138b6d37ad74b8c Mar 20 16:01:32 crc kubenswrapper[4813]: W0320 16:01:32.314701 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddeea0819_58b8_4780_8e3c_49dd4185a4e3.slice/crio-6987f68ee9aabf97cffe9b563337108619d502b4ec4ce170df15fb85b521ca91 WatchSource:0}: Error finding container 6987f68ee9aabf97cffe9b563337108619d502b4ec4ce170df15fb85b521ca91: Status 404 returned error can't find the container with id 6987f68ee9aabf97cffe9b563337108619d502b4ec4ce170df15fb85b521ca91 Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.316556 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-854b-account-create-update-hfhhk"] Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.830281 4813 generic.go:334] "Generic (PLEG): container finished" podID="deea0819-58b8-4780-8e3c-49dd4185a4e3" containerID="362cd5d2bf6cc0f57ea94b865eb5443198780f5d57408067f59dd32c06442d72" exitCode=0 Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.830364 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" event={"ID":"deea0819-58b8-4780-8e3c-49dd4185a4e3","Type":"ContainerDied","Data":"362cd5d2bf6cc0f57ea94b865eb5443198780f5d57408067f59dd32c06442d72"} Mar 20 
16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.830389 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" event={"ID":"deea0819-58b8-4780-8e3c-49dd4185a4e3","Type":"ContainerStarted","Data":"6987f68ee9aabf97cffe9b563337108619d502b4ec4ce170df15fb85b521ca91"} Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.836089 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerStarted","Data":"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603"} Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.837336 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.837987 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qqmk2" event={"ID":"60365901-d14b-436f-828e-070910ceccba","Type":"ContainerDied","Data":"7cf14ce9f36d8a6c5b91a01ad085ff8ced781646d780c90da2e7dcda674db211"} Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.837912 4813 generic.go:334] "Generic (PLEG): container finished" podID="60365901-d14b-436f-828e-070910ceccba" containerID="7cf14ce9f36d8a6c5b91a01ad085ff8ced781646d780c90da2e7dcda674db211" exitCode=0 Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.838054 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qqmk2" event={"ID":"60365901-d14b-436f-828e-070910ceccba","Type":"ContainerStarted","Data":"8f520388b98cc43d49b5d7def8267cb7012acb1f59e83879c138b6d37ad74b8c"} Mar 20 16:01:32 crc kubenswrapper[4813]: I0320 16:01:32.896201 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.7436828159999997 podStartE2EDuration="6.896183547s" podCreationTimestamp="2026-03-20 16:01:26 +0000 UTC" firstStartedPulling="2026-03-20 16:01:27.817910762 +0000 UTC m=+1417.240613593" lastFinishedPulling="2026-03-20 16:01:31.970411483 +0000 UTC m=+1421.393114324" observedRunningTime="2026-03-20 16:01:32.895141679 +0000 UTC m=+1422.317844530" watchObservedRunningTime="2026-03-20 16:01:32.896183547 +0000 UTC m=+1422.318886398" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.289799 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.294295 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.359731 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60365901-d14b-436f-828e-070910ceccba-operator-scripts\") pod \"60365901-d14b-436f-828e-070910ceccba\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.359826 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deea0819-58b8-4780-8e3c-49dd4185a4e3-operator-scripts\") pod \"deea0819-58b8-4780-8e3c-49dd4185a4e3\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.359852 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzgvg\" (UniqueName: \"kubernetes.io/projected/deea0819-58b8-4780-8e3c-49dd4185a4e3-kube-api-access-fzgvg\") pod \"deea0819-58b8-4780-8e3c-49dd4185a4e3\" (UID: \"deea0819-58b8-4780-8e3c-49dd4185a4e3\") " Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.359897 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rps6h\" (UniqueName: \"kubernetes.io/projected/60365901-d14b-436f-828e-070910ceccba-kube-api-access-rps6h\") pod \"60365901-d14b-436f-828e-070910ceccba\" (UID: \"60365901-d14b-436f-828e-070910ceccba\") " Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.360218 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/deea0819-58b8-4780-8e3c-49dd4185a4e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "deea0819-58b8-4780-8e3c-49dd4185a4e3" (UID: "deea0819-58b8-4780-8e3c-49dd4185a4e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.360250 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60365901-d14b-436f-828e-070910ceccba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "60365901-d14b-436f-828e-070910ceccba" (UID: "60365901-d14b-436f-828e-070910ceccba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.360352 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60365901-d14b-436f-828e-070910ceccba-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.360364 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/deea0819-58b8-4780-8e3c-49dd4185a4e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.365562 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deea0819-58b8-4780-8e3c-49dd4185a4e3-kube-api-access-fzgvg" (OuterVolumeSpecName: "kube-api-access-fzgvg") pod "deea0819-58b8-4780-8e3c-49dd4185a4e3" (UID: "deea0819-58b8-4780-8e3c-49dd4185a4e3"). InnerVolumeSpecName "kube-api-access-fzgvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.365683 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60365901-d14b-436f-828e-070910ceccba-kube-api-access-rps6h" (OuterVolumeSpecName: "kube-api-access-rps6h") pod "60365901-d14b-436f-828e-070910ceccba" (UID: "60365901-d14b-436f-828e-070910ceccba"). InnerVolumeSpecName "kube-api-access-rps6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.462275 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzgvg\" (UniqueName: \"kubernetes.io/projected/deea0819-58b8-4780-8e3c-49dd4185a4e3-kube-api-access-fzgvg\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.462634 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rps6h\" (UniqueName: \"kubernetes.io/projected/60365901-d14b-436f-828e-070910ceccba-kube-api-access-rps6h\") on node \"crc\" DevicePath \"\"" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.854316 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" event={"ID":"deea0819-58b8-4780-8e3c-49dd4185a4e3","Type":"ContainerDied","Data":"6987f68ee9aabf97cffe9b563337108619d502b4ec4ce170df15fb85b521ca91"} Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.854359 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-854b-account-create-update-hfhhk" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.854381 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6987f68ee9aabf97cffe9b563337108619d502b4ec4ce170df15fb85b521ca91" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.857039 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qqmk2" Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.866758 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qqmk2" event={"ID":"60365901-d14b-436f-828e-070910ceccba","Type":"ContainerDied","Data":"8f520388b98cc43d49b5d7def8267cb7012acb1f59e83879c138b6d37ad74b8c"} Mar 20 16:01:34 crc kubenswrapper[4813]: I0320 16:01:34.866884 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f520388b98cc43d49b5d7def8267cb7012acb1f59e83879c138b6d37ad74b8c" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.651060 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd"] Mar 20 16:01:36 crc kubenswrapper[4813]: E0320 16:01:36.651748 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deea0819-58b8-4780-8e3c-49dd4185a4e3" containerName="mariadb-account-create-update" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.651763 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="deea0819-58b8-4780-8e3c-49dd4185a4e3" containerName="mariadb-account-create-update" Mar 20 16:01:36 crc kubenswrapper[4813]: E0320 16:01:36.651779 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60365901-d14b-436f-828e-070910ceccba" containerName="mariadb-database-create" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.651788 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="60365901-d14b-436f-828e-070910ceccba" containerName="mariadb-database-create" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.651989 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="60365901-d14b-436f-828e-070910ceccba" containerName="mariadb-database-create" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.652010 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="deea0819-58b8-4780-8e3c-49dd4185a4e3" containerName="mariadb-account-create-update" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.652649 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.654565 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.654601 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-9s9qc" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.669554 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd"] Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.799310 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-db-sync-config-data\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.799370 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.799462 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-config-data\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.799681 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nppjn\" (UniqueName: \"kubernetes.io/projected/b223213d-37ca-4b68-b871-e88a8c4811bf-kube-api-access-nppjn\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.901583 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nppjn\" (UniqueName: \"kubernetes.io/projected/b223213d-37ca-4b68-b871-e88a8c4811bf-kube-api-access-nppjn\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.901702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-db-sync-config-data\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.901738 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 
16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.901777 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-config-data\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.906697 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-db-sync-config-data\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.906936 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-config-data\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.908761 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.925158 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nppjn\" (UniqueName: \"kubernetes.io/projected/b223213d-37ca-4b68-b871-e88a8c4811bf-kube-api-access-nppjn\") pod \"watcher-kuttl-db-sync-ghlxd\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:36 crc kubenswrapper[4813]: I0320 16:01:36.968448 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:01:37 crc kubenswrapper[4813]: I0320 16:01:37.314667 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/kube-state-metrics-0" Mar 20 16:01:37 crc kubenswrapper[4813]: I0320 16:01:37.636762 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd"] Mar 20 16:01:37 crc kubenswrapper[4813]: I0320 16:01:37.882733 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" event={"ID":"b223213d-37ca-4b68-b871-e88a8c4811bf","Type":"ContainerStarted","Data":"1f10bf6e63c04ae2420f582a3e65d9fb2507bf74912fdaf06ed1aafb7e0534e8"} Mar 20 16:01:57 crc kubenswrapper[4813]: I0320 16:01:57.201534 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:01:57 crc kubenswrapper[4813]: E0320 16:01:57.354078 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.147:5001/podified-master-centos10/openstack-watcher-api:watcher_latest" Mar 20 16:01:57 crc kubenswrapper[4813]: E0320 16:01:57.354154 4813 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.147:5001/podified-master-centos10/openstack-watcher-api:watcher_latest" Mar 20 16:01:57 crc kubenswrapper[4813]: E0320 16:01:57.354316 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:watcher-kuttl-db-sync,Image:38.102.83.147:5001/podified-master-centos10/openstack-watcher-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/watcher/watcher.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:watcher-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nppjn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-kuttl-db-sync-ghlxd_watcher-kuttl-default(b223213d-37ca-4b68-b871-e88a8c4811bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 16:01:57 crc kubenswrapper[4813]: E0320 16:01:57.355529 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-kuttl-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" podUID="b223213d-37ca-4b68-b871-e88a8c4811bf" Mar 20 16:01:58 crc kubenswrapper[4813]: E0320 16:01:58.114149 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-kuttl-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.147:5001/podified-master-centos10/openstack-watcher-api:watcher_latest\\\"\"" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" podUID="b223213d-37ca-4b68-b871-e88a8c4811bf" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.157946 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567042-xqvjm"] Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.165352 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.170462 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.170549 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.170775 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.184052 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567042-xqvjm"] Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.225045 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf729\" (UniqueName: \"kubernetes.io/projected/df1feb5f-9978-4d2a-aa1d-5fb701529f21-kube-api-access-rf729\") pod \"auto-csr-approver-29567042-xqvjm\" (UID: \"df1feb5f-9978-4d2a-aa1d-5fb701529f21\") " pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.326923 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf729\" (UniqueName: \"kubernetes.io/projected/df1feb5f-9978-4d2a-aa1d-5fb701529f21-kube-api-access-rf729\") pod \"auto-csr-approver-29567042-xqvjm\" (UID: \"df1feb5f-9978-4d2a-aa1d-5fb701529f21\") " pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.349809 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf729\" (UniqueName: \"kubernetes.io/projected/df1feb5f-9978-4d2a-aa1d-5fb701529f21-kube-api-access-rf729\") pod \"auto-csr-approver-29567042-xqvjm\" (UID: \"df1feb5f-9978-4d2a-aa1d-5fb701529f21\") " pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.488793 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:00 crc kubenswrapper[4813]: I0320 16:02:00.743955 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567042-xqvjm"] Mar 20 16:02:01 crc kubenswrapper[4813]: I0320 16:02:01.134387 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" event={"ID":"df1feb5f-9978-4d2a-aa1d-5fb701529f21","Type":"ContainerStarted","Data":"6216425878a9267005e3097a0f556ec6cd6680c169503608fed4f3ed1990f9f2"} Mar 20 16:02:02 crc kubenswrapper[4813]: I0320 16:02:02.143400 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" event={"ID":"df1feb5f-9978-4d2a-aa1d-5fb701529f21","Type":"ContainerStarted","Data":"9bb2db071164b6cefa4f445a6768f2913a8dfdeb3c7001d00f50b0e623519d87"} Mar 20 16:02:02 crc kubenswrapper[4813]: I0320 16:02:02.161271 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" podStartSLOduration=1.214080351 podStartE2EDuration="2.161250912s" podCreationTimestamp="2026-03-20 16:02:00 +0000 UTC" firstStartedPulling="2026-03-20 16:02:00.758727032 +0000 UTC m=+1450.181429893" lastFinishedPulling="2026-03-20 16:02:01.705897603 +0000 UTC m=+1451.128600454" observedRunningTime="2026-03-20 16:02:02.155223709 +0000 UTC m=+1451.577926550" watchObservedRunningTime="2026-03-20 16:02:02.161250912 +0000 UTC m=+1451.583953753" Mar 20 16:02:03 crc kubenswrapper[4813]: I0320 16:02:03.152774 4813 generic.go:334] "Generic (PLEG): container finished" podID="df1feb5f-9978-4d2a-aa1d-5fb701529f21" containerID="9bb2db071164b6cefa4f445a6768f2913a8dfdeb3c7001d00f50b0e623519d87" exitCode=0 Mar 20 16:02:03 crc kubenswrapper[4813]: I0320 16:02:03.152814 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" event={"ID":"df1feb5f-9978-4d2a-aa1d-5fb701529f21","Type":"ContainerDied","Data":"9bb2db071164b6cefa4f445a6768f2913a8dfdeb3c7001d00f50b0e623519d87"} Mar 20 16:02:04 crc kubenswrapper[4813]: I0320 16:02:04.428969 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:04 crc kubenswrapper[4813]: I0320 16:02:04.507523 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf729\" (UniqueName: \"kubernetes.io/projected/df1feb5f-9978-4d2a-aa1d-5fb701529f21-kube-api-access-rf729\") pod \"df1feb5f-9978-4d2a-aa1d-5fb701529f21\" (UID: \"df1feb5f-9978-4d2a-aa1d-5fb701529f21\") " Mar 20 16:02:04 crc kubenswrapper[4813]: I0320 16:02:04.514492 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df1feb5f-9978-4d2a-aa1d-5fb701529f21-kube-api-access-rf729" (OuterVolumeSpecName: "kube-api-access-rf729") pod "df1feb5f-9978-4d2a-aa1d-5fb701529f21" (UID: "df1feb5f-9978-4d2a-aa1d-5fb701529f21"). InnerVolumeSpecName "kube-api-access-rf729". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:04 crc kubenswrapper[4813]: I0320 16:02:04.609123 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf729\" (UniqueName: \"kubernetes.io/projected/df1feb5f-9978-4d2a-aa1d-5fb701529f21-kube-api-access-rf729\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:05 crc kubenswrapper[4813]: I0320 16:02:05.172312 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" event={"ID":"df1feb5f-9978-4d2a-aa1d-5fb701529f21","Type":"ContainerDied","Data":"6216425878a9267005e3097a0f556ec6cd6680c169503608fed4f3ed1990f9f2"} Mar 20 16:02:05 crc kubenswrapper[4813]: I0320 16:02:05.172364 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6216425878a9267005e3097a0f556ec6cd6680c169503608fed4f3ed1990f9f2" Mar 20 16:02:05 crc kubenswrapper[4813]: I0320 16:02:05.172830 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567042-xqvjm" Mar 20 16:02:05 crc kubenswrapper[4813]: I0320 16:02:05.507813 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567036-whmmc"] Mar 20 16:02:05 crc kubenswrapper[4813]: I0320 16:02:05.515137 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567036-whmmc"] Mar 20 16:02:07 crc kubenswrapper[4813]: I0320 16:02:07.276174 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e821fe55-f415-430a-be0c-d082bd778340" path="/var/lib/kubelet/pods/e821fe55-f415-430a-be0c-d082bd778340/volumes" Mar 20 16:02:13 crc kubenswrapper[4813]: I0320 16:02:13.243406 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" event={"ID":"b223213d-37ca-4b68-b871-e88a8c4811bf","Type":"ContainerStarted","Data":"36a14dcb7046c156b7b03ea6338b10c4287ac14dd18f2450ab26aea0258ded0d"} Mar 20 16:02:13 crc kubenswrapper[4813]: I0320 16:02:13.283471 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" podStartSLOduration=2.476236253 podStartE2EDuration="37.28345155s" podCreationTimestamp="2026-03-20 16:01:36 +0000 UTC" firstStartedPulling="2026-03-20 16:01:37.628928261 +0000 UTC m=+1427.051631102" lastFinishedPulling="2026-03-20 16:02:12.436143558 +0000 UTC m=+1461.858846399" observedRunningTime="2026-03-20 16:02:13.276868713 +0000 UTC m=+1462.699571554" watchObservedRunningTime="2026-03-20 16:02:13.28345155 +0000 UTC m=+1462.706154401" Mar 20 16:02:15 crc kubenswrapper[4813]: I0320 16:02:15.989019 4813 scope.go:117] "RemoveContainer" containerID="f996c5b2ba397eaee49a56bce470a1e2ec56d25a1d14a175e258cbf3cc9671d9" Mar 20 16:02:16 crc kubenswrapper[4813]: I0320 16:02:16.036290 4813 scope.go:117] "RemoveContainer" containerID="6b4a09e4cc381b2a119fec3135b2993692e07ef86e25af6e5375cc76503f2963" Mar 20 16:02:17 crc kubenswrapper[4813]: I0320 16:02:17.279012 4813 generic.go:334] "Generic (PLEG): container finished" podID="b223213d-37ca-4b68-b871-e88a8c4811bf" containerID="36a14dcb7046c156b7b03ea6338b10c4287ac14dd18f2450ab26aea0258ded0d" exitCode=0 Mar 20 16:02:17 crc kubenswrapper[4813]: I0320 16:02:17.279589 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" 
event={"ID":"b223213d-37ca-4b68-b871-e88a8c4811bf","Type":"ContainerDied","Data":"36a14dcb7046c156b7b03ea6338b10c4287ac14dd18f2450ab26aea0258ded0d"} Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.579178 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.635868 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-db-sync-config-data\") pod \"b223213d-37ca-4b68-b871-e88a8c4811bf\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.636157 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-config-data\") pod \"b223213d-37ca-4b68-b871-e88a8c4811bf\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.636243 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nppjn\" (UniqueName: \"kubernetes.io/projected/b223213d-37ca-4b68-b871-e88a8c4811bf-kube-api-access-nppjn\") pod \"b223213d-37ca-4b68-b871-e88a8c4811bf\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.636382 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-combined-ca-bundle\") pod \"b223213d-37ca-4b68-b871-e88a8c4811bf\" (UID: \"b223213d-37ca-4b68-b871-e88a8c4811bf\") " Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.641213 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b223213d-37ca-4b68-b871-e88a8c4811bf" (UID: "b223213d-37ca-4b68-b871-e88a8c4811bf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.641680 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b223213d-37ca-4b68-b871-e88a8c4811bf-kube-api-access-nppjn" (OuterVolumeSpecName: "kube-api-access-nppjn") pod "b223213d-37ca-4b68-b871-e88a8c4811bf" (UID: "b223213d-37ca-4b68-b871-e88a8c4811bf"). InnerVolumeSpecName "kube-api-access-nppjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.660083 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b223213d-37ca-4b68-b871-e88a8c4811bf" (UID: "b223213d-37ca-4b68-b871-e88a8c4811bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.700339 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-config-data" (OuterVolumeSpecName: "config-data") pod "b223213d-37ca-4b68-b871-e88a8c4811bf" (UID: "b223213d-37ca-4b68-b871-e88a8c4811bf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.738688 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nppjn\" (UniqueName: \"kubernetes.io/projected/b223213d-37ca-4b68-b871-e88a8c4811bf-kube-api-access-nppjn\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.738729 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.738740 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:18 crc kubenswrapper[4813]: I0320 16:02:18.738751 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b223213d-37ca-4b68-b871-e88a8c4811bf-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.295188 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" event={"ID":"b223213d-37ca-4b68-b871-e88a8c4811bf","Type":"ContainerDied","Data":"1f10bf6e63c04ae2420f582a3e65d9fb2507bf74912fdaf06ed1aafb7e0534e8"} Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.295546 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f10bf6e63c04ae2420f582a3e65d9fb2507bf74912fdaf06ed1aafb7e0534e8" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.295251 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.599945 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:19 crc kubenswrapper[4813]: E0320 16:02:19.600326 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df1feb5f-9978-4d2a-aa1d-5fb701529f21" containerName="oc" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.600339 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="df1feb5f-9978-4d2a-aa1d-5fb701529f21" containerName="oc" Mar 20 16:02:19 crc kubenswrapper[4813]: E0320 16:02:19.600356 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b223213d-37ca-4b68-b871-e88a8c4811bf" containerName="watcher-kuttl-db-sync" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.600364 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b223213d-37ca-4b68-b871-e88a8c4811bf" containerName="watcher-kuttl-db-sync" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.600583 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b223213d-37ca-4b68-b871-e88a8c4811bf" containerName="watcher-kuttl-db-sync" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.600595 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="df1feb5f-9978-4d2a-aa1d-5fb701529f21" containerName="oc" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.602007 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.604649 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.605320 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-9s9qc" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.609383 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.610560 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.612212 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.617745 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.638406 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.653606 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.653810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.653883 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc9777b-b253-4482-9703-88a641a076f6-logs\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.653981 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.660338 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v78mv\" (UniqueName: \"kubernetes.io/projected/7bc9777b-b253-4482-9703-88a641a076f6-kube-api-access-v78mv\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.660528 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.660601 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.660697 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.660787 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bbjh\" (UniqueName: \"kubernetes.io/projected/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-kube-api-access-6bbjh\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.693864 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.695177 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.701088 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.704201 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762590 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bbjh\" (UniqueName: \"kubernetes.io/projected/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-kube-api-access-6bbjh\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762678 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762734 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762761 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjsjr\" (UniqueName: \"kubernetes.io/projected/69eef677-38c1-48f8-8f7c-829780d8071e-kube-api-access-wjsjr\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762789 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762834 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc9777b-b253-4482-9703-88a641a076f6-logs\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762895 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762920 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.762975 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v78mv\" (UniqueName: \"kubernetes.io/projected/7bc9777b-b253-4482-9703-88a641a076f6-kube-api-access-v78mv\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.763022 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.763047 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.763075 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69eef677-38c1-48f8-8f7c-829780d8071e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.763101 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.763749 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc9777b-b253-4482-9703-88a641a076f6-logs\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.764241 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.767813 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.768133 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.768398 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.779223 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.780352 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.783120 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v78mv\" (UniqueName: 
\"kubernetes.io/projected/7bc9777b-b253-4482-9703-88a641a076f6-kube-api-access-v78mv\") pod \"watcher-kuttl-api-0\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.783593 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bbjh\" (UniqueName: \"kubernetes.io/projected/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-kube-api-access-6bbjh\") pod \"watcher-kuttl-applier-0\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.863950 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.864005 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.864025 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjsjr\" (UniqueName: \"kubernetes.io/projected/69eef677-38c1-48f8-8f7c-829780d8071e-kube-api-access-wjsjr\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.864066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.864118 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69eef677-38c1-48f8-8f7c-829780d8071e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.864600 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69eef677-38c1-48f8-8f7c-829780d8071e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.867676 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.867876 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.867959 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.879807 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjsjr\" (UniqueName: \"kubernetes.io/projected/69eef677-38c1-48f8-8f7c-829780d8071e-kube-api-access-wjsjr\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.923958 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:19 crc kubenswrapper[4813]: I0320 16:02:19.939012 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:20 crc kubenswrapper[4813]: I0320 16:02:20.016137 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:20 crc kubenswrapper[4813]: I0320 16:02:20.628219 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:20 crc kubenswrapper[4813]: I0320 16:02:20.721737 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:20 crc kubenswrapper[4813]: I0320 16:02:20.787285 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.346121 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"69eef677-38c1-48f8-8f7c-829780d8071e","Type":"ContainerStarted","Data":"54ca52880f34dba944993af8a4c18e89e98c9aef2db144d131079449ce9d14d9"} Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.349013 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7bc9777b-b253-4482-9703-88a641a076f6","Type":"ContainerStarted","Data":"fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b"} Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.349060 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7bc9777b-b253-4482-9703-88a641a076f6","Type":"ContainerStarted","Data":"7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5"} Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.349070 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7bc9777b-b253-4482-9703-88a641a076f6","Type":"ContainerStarted","Data":"43e4ee3cd9e6a35810ef1a8ef084a4582c7fff7ab5509554ba7472d6434425c3"} Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.350363 4813 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.351680 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.134:9322/\": dial tcp 10.217.0.134:9322: connect: connection refused" Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.352515 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5","Type":"ContainerStarted","Data":"c8782a8e091a4b570b9a9560a7b13e08737e5b06d0607f9a21a097dfc8340e84"} Mar 20 16:02:21 crc kubenswrapper[4813]: I0320 16:02:21.372879 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.372859205 podStartE2EDuration="2.372859205s" podCreationTimestamp="2026-03-20 16:02:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:02:21.3678749 +0000 UTC m=+1470.790577741" watchObservedRunningTime="2026-03-20 16:02:21.372859205 +0000 UTC m=+1470.795562046" Mar 20 16:02:23 crc kubenswrapper[4813]: I0320 16:02:23.373563 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"69eef677-38c1-48f8-8f7c-829780d8071e","Type":"ContainerStarted","Data":"8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80"} Mar 20 16:02:23 crc kubenswrapper[4813]: I0320 16:02:23.375374 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5","Type":"ContainerStarted","Data":"84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763"} Mar 20 16:02:23 crc kubenswrapper[4813]: I0320 16:02:23.400818 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=3.024019765 podStartE2EDuration="4.400801489s" podCreationTimestamp="2026-03-20 16:02:19 +0000 UTC" firstStartedPulling="2026-03-20 16:02:20.791943013 +0000 UTC m=+1470.214645854" lastFinishedPulling="2026-03-20 16:02:22.168724737 +0000 UTC m=+1471.591427578" observedRunningTime="2026-03-20 16:02:23.397980593 +0000 UTC m=+1472.820683474" watchObservedRunningTime="2026-03-20 16:02:23.400801489 +0000 UTC m=+1472.823504320" Mar 20 16:02:23 crc kubenswrapper[4813]: I0320 16:02:23.429690 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=2.901541873 podStartE2EDuration="4.429671209s" podCreationTimestamp="2026-03-20 16:02:19 +0000 UTC" firstStartedPulling="2026-03-20 16:02:20.636588663 +0000 UTC m=+1470.059291504" lastFinishedPulling="2026-03-20 16:02:22.164718009 +0000 UTC m=+1471.587420840" observedRunningTime="2026-03-20 16:02:23.423458671 +0000 UTC m=+1472.846161532" watchObservedRunningTime="2026-03-20 16:02:23.429671209 +0000 UTC m=+1472.852374050" Mar 20 16:02:24 crc kubenswrapper[4813]: I0320 16:02:24.640233 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:24 crc kubenswrapper[4813]: I0320 16:02:24.924982 4813 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:24 crc kubenswrapper[4813]: I0320 16:02:24.939463 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:29 crc kubenswrapper[4813]: I0320 16:02:29.924518 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:29 crc kubenswrapper[4813]: I0320 16:02:29.940251 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:29 crc kubenswrapper[4813]: I0320 16:02:29.941023 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:29 crc kubenswrapper[4813]: I0320 16:02:29.967427 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:30 crc kubenswrapper[4813]: I0320 16:02:30.017618 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:30 crc kubenswrapper[4813]: I0320 16:02:30.039901 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:30 crc kubenswrapper[4813]: I0320 16:02:30.427360 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:30 crc kubenswrapper[4813]: I0320 16:02:30.432771 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:30 crc kubenswrapper[4813]: I0320 16:02:30.464092 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:30 crc kubenswrapper[4813]: I0320 16:02:30.464682 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:32 crc kubenswrapper[4813]: I0320 16:02:32.538446 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:02:32 crc kubenswrapper[4813]: I0320 16:02:32.539104 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-central-agent" containerID="cri-o://f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" gracePeriod=30 Mar 20 16:02:32 crc kubenswrapper[4813]: I0320 16:02:32.539601 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="proxy-httpd" containerID="cri-o://984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" gracePeriod=30 Mar 20 16:02:32 crc kubenswrapper[4813]: I0320 16:02:32.539662 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="sg-core" containerID="cri-o://1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" gracePeriod=30 Mar 20 16:02:32 crc kubenswrapper[4813]: I0320 16:02:32.539701 4813 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="watcher-kuttl-default/ceilometer-0" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-notification-agent" containerID="cri-o://890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" gracePeriod=30 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.364242 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b7z4s"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.365926 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.378370 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7z4s"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.421800 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zscrl\" (UniqueName: \"kubernetes.io/projected/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-kube-api-access-zscrl\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.423041 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-catalog-content\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.423109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-utilities\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.435613 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461267 4813 generic.go:334] "Generic (PLEG): container finished" podID="2601574a-36c2-45b2-9369-b3804694a6e5" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" exitCode=0 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461303 4813 generic.go:334] "Generic (PLEG): container finished" podID="2601574a-36c2-45b2-9369-b3804694a6e5" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" exitCode=2 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461315 4813 generic.go:334] "Generic (PLEG): container finished" podID="2601574a-36c2-45b2-9369-b3804694a6e5" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" exitCode=0 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461320 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461342 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerDied","Data":"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603"} Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerDied","Data":"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2"} Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461390 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerDied","Data":"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42"} Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461405 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerDied","Data":"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab"} Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461403 4813 scope.go:117] "RemoveContainer" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461324 4813 generic.go:334] "Generic (PLEG): container finished" podID="2601574a-36c2-45b2-9369-b3804694a6e5" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" exitCode=0 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.461506 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2601574a-36c2-45b2-9369-b3804694a6e5","Type":"ContainerDied","Data":"a535084ef304e74fec3e9137806bde0e5e81c9be812c034a6f96771f2ed819f2"} Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.485437 4813 scope.go:117] "RemoveContainer" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.519871 4813 scope.go:117] "RemoveContainer" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523458 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-combined-ca-bundle\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523507 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-sg-core-conf-yaml\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523541 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-run-httpd\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523615 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-log-httpd\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523644 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-config-data\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523681 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-ceilometer-tls-certs\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523718 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gdwh\" (UniqueName: \"kubernetes.io/projected/2601574a-36c2-45b2-9369-b3804694a6e5-kube-api-access-2gdwh\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523793 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-scripts\") pod \"2601574a-36c2-45b2-9369-b3804694a6e5\" (UID: \"2601574a-36c2-45b2-9369-b3804694a6e5\") " Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.523957 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zscrl\" (UniqueName: \"kubernetes.io/projected/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-kube-api-access-zscrl\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.524029 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-catalog-content\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.524066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-utilities\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.524511 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-utilities\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.525227 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-catalog-content\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") 
" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.525373 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.526372 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.529018 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2601574a-36c2-45b2-9369-b3804694a6e5-kube-api-access-2gdwh" (OuterVolumeSpecName: "kube-api-access-2gdwh") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "kube-api-access-2gdwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.529364 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-scripts" (OuterVolumeSpecName: "scripts") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.546307 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zscrl\" (UniqueName: \"kubernetes.io/projected/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-kube-api-access-zscrl\") pod \"redhat-marketplace-b7z4s\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.558112 4813 scope.go:117] "RemoveContainer" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.570885 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.571126 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" containerName="watcher-applier" containerID="cri-o://84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" gracePeriod=30 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.602426 4813 scope.go:117] "RemoveContainer" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.602952 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.603428 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": container with ID starting with 984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603 not found: ID does not exist" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.603498 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603"} err="failed to get container status \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": rpc error: code = NotFound desc = could not find container \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": container with ID starting with 984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.603528 4813 scope.go:117] "RemoveContainer" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.603896 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": container with ID starting with 1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2 not found: ID does not exist" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.603921 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2"} err="failed to get container status \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": rpc error: code = NotFound desc = could not find container \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": container with ID starting with 1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.603946 4813 scope.go:117] "RemoveContainer" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.604175 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": container with ID starting with 890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42 not found: ID does not exist" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.604199 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42"} err="failed to get container status \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": rpc error: code = NotFound desc = could not find container \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": container with ID starting with 890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42 
not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.604219 4813 scope.go:117] "RemoveContainer" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.604411 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": container with ID starting with f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab not found: ID does not exist" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.604436 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab"} err="failed to get container status \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": rpc error: code = NotFound desc = could not find container \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": container with ID starting with f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.604454 4813 scope.go:117] "RemoveContainer" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.604725 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603"} err="failed to get container status \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": rpc error: code = NotFound desc = could not find container \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": container with ID starting with 984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.604744 4813 scope.go:117] "RemoveContainer" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.607932 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2"} err="failed to get container status \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": rpc error: code = NotFound desc = could not find container \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": container with ID starting with 1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.607957 4813 scope.go:117] "RemoveContainer" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.608321 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42"} err="failed to get container status \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": rpc error: code = NotFound desc = could not find container \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": container with ID starting with 890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42 
not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.608338 4813 scope.go:117] "RemoveContainer" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.612622 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab"} err="failed to get container status \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": rpc error: code = NotFound desc = could not find container \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": container with ID starting with f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.612663 4813 scope.go:117] "RemoveContainer" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.613161 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603"} err="failed to get container status \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": rpc error: code = NotFound desc = could not find container \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": container with ID starting with 984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.613190 4813 scope.go:117] "RemoveContainer" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.615724 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2"} err="failed to get container status \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": rpc error: code = NotFound desc = could not find container \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": container with ID starting with 1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.615756 4813 scope.go:117] "RemoveContainer" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.617252 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42"} err="failed to get container status \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": rpc error: code = NotFound desc = could not find container \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": container with ID starting with 890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.617278 4813 scope.go:117] "RemoveContainer" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.618061 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.618300 4813 
kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-kuttl-api-log" containerID="cri-o://7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5" gracePeriod=30 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.618712 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-api" containerID="cri-o://fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b" gracePeriod=30 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.619009 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab"} err="failed to get container status \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": rpc error: code = NotFound desc = could not find container \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": container with ID starting with f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.619048 4813 scope.go:117] "RemoveContainer" containerID="984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.619423 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603"} err="failed to get container status \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": rpc error: code = NotFound desc = could not find container \"984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603\": container with ID starting with 984ba830e41f7096ce228b41ad80e56f84669a60043b099028af190f75a12603 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.619442 4813 scope.go:117] "RemoveContainer" containerID="1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.625658 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.625830 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="69eef677-38c1-48f8-8f7c-829780d8071e" containerName="watcher-decision-engine" containerID="cri-o://8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80" gracePeriod=30 Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.628654 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gdwh\" (UniqueName: \"kubernetes.io/projected/2601574a-36c2-45b2-9369-b3804694a6e5-kube-api-access-2gdwh\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.628677 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.628688 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-sg-core-conf-yaml\") on node 
\"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.628696 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.628704 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2601574a-36c2-45b2-9369-b3804694a6e5-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.629669 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2"} err="failed to get container status \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": rpc error: code = NotFound desc = could not find container \"1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2\": container with ID starting with 1c31a3f9ad06e3bf11fe51a89da908963a71582a644fd7e7366ee0423204a6a2 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.629702 4813 scope.go:117] "RemoveContainer" containerID="890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.630382 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42"} err="failed to get container status \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": rpc error: code = NotFound desc = could not find container \"890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42\": container with ID starting with 890df9f5e3e1b8adf408c1943317e320d3848575ec866b835211b79828dacb42 not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.630403 4813 scope.go:117] "RemoveContainer" containerID="f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.633927 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab"} err="failed to get container status \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": rpc error: code = NotFound desc = could not find container \"f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab\": container with ID starting with f5b4c4a073f3bae6985ba8193c159a4d4334c558ac8257568da7feb10a9cdaab not found: ID does not exist" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.634107 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.640064 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.655264 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-config-data" (OuterVolumeSpecName: "config-data") pod "2601574a-36c2-45b2-9369-b3804694a6e5" (UID: "2601574a-36c2-45b2-9369-b3804694a6e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.730783 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.730815 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.730824 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2601574a-36c2-45b2-9369-b3804694a6e5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.730946 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.847017 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.872074 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.882665 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.883164 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="sg-core" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883186 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="sg-core" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.883215 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-notification-agent" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883225 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-notification-agent" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.883240 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-central-agent" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883249 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-central-agent" Mar 20 16:02:33 crc kubenswrapper[4813]: E0320 16:02:33.883264 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="proxy-httpd" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883273 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" 
containerName="proxy-httpd" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883450 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="sg-core" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883467 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-notification-agent" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883565 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="ceilometer-central-agent" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.883581 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" containerName="proxy-httpd" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.889905 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.893848 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.894026 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.894129 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:02:33 crc kubenswrapper[4813]: I0320 16:02:33.894085 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.043356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-run-httpd\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.043423 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.044604 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-scripts\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.044923 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-log-httpd\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.044968 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-sg-core-conf-yaml\") pod \"ceilometer-0\" 
(UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.045068 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.045123 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-config-data\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.045256 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgf4l\" (UniqueName: \"kubernetes.io/projected/36a54386-f521-421f-8dd9-48393304cec4-kube-api-access-mgf4l\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147282 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-log-httpd\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147346 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147402 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147434 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-config-data\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147500 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgf4l\" (UniqueName: \"kubernetes.io/projected/36a54386-f521-421f-8dd9-48393304cec4-kube-api-access-mgf4l\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147526 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-run-httpd\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147543 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147572 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-scripts\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.147871 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-log-httpd\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.148002 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-run-httpd\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.153445 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.154187 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-scripts\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.154834 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-config-data\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.157754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.157991 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.174230 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgf4l\" (UniqueName: \"kubernetes.io/projected/36a54386-f521-421f-8dd9-48393304cec4-kube-api-access-mgf4l\") pod \"ceilometer-0\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc 
kubenswrapper[4813]: I0320 16:02:34.224006 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.241015 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7z4s"] Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.474201 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerStarted","Data":"c88d25a2394ac374127a53781101774b4a6b066aac51905ec4c8181bd025cb6c"} Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.474531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerStarted","Data":"545585ea69e64e3e221f95a0e52f55f8e41cae8d516572db830f09b126e0d350"} Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.484873 4813 generic.go:334] "Generic (PLEG): container finished" podID="7bc9777b-b253-4482-9703-88a641a076f6" containerID="7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5" exitCode=143 Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.484949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7bc9777b-b253-4482-9703-88a641a076f6","Type":"ContainerDied","Data":"7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5"} Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.711577 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:02:34 crc kubenswrapper[4813]: W0320 16:02:34.718515 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36a54386_f521_421f_8dd9_48393304cec4.slice/crio-edb84e850a811c15e23c94cedaac15e9fcdde318f3c6a1ba069c60c0da1900ff WatchSource:0}: Error finding container edb84e850a811c15e23c94cedaac15e9fcdde318f3c6a1ba069c60c0da1900ff: Status 404 returned error can't find the container with id edb84e850a811c15e23c94cedaac15e9fcdde318f3c6a1ba069c60c0da1900ff Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.886656 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:34 crc kubenswrapper[4813]: E0320 16:02:34.940889 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:02:34 crc kubenswrapper[4813]: E0320 16:02:34.942648 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:02:34 crc kubenswrapper[4813]: E0320 16:02:34.944040 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:02:34 crc kubenswrapper[4813]: E0320 16:02:34.944106 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" containerName="watcher-applier" Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.962473 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-combined-ca-bundle\") pod \"7bc9777b-b253-4482-9703-88a641a076f6\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.962600 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v78mv\" (UniqueName: \"kubernetes.io/projected/7bc9777b-b253-4482-9703-88a641a076f6-kube-api-access-v78mv\") pod \"7bc9777b-b253-4482-9703-88a641a076f6\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.962713 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc9777b-b253-4482-9703-88a641a076f6-logs\") pod \"7bc9777b-b253-4482-9703-88a641a076f6\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.962813 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-config-data\") pod \"7bc9777b-b253-4482-9703-88a641a076f6\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.962888 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-custom-prometheus-ca\") pod \"7bc9777b-b253-4482-9703-88a641a076f6\" (UID: \"7bc9777b-b253-4482-9703-88a641a076f6\") " Mar 20 16:02:34 crc kubenswrapper[4813]: I0320 16:02:34.963344 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/7bc9777b-b253-4482-9703-88a641a076f6-logs" (OuterVolumeSpecName: "logs") pod "7bc9777b-b253-4482-9703-88a641a076f6" (UID: "7bc9777b-b253-4482-9703-88a641a076f6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.008173 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bc9777b-b253-4482-9703-88a641a076f6-kube-api-access-v78mv" (OuterVolumeSpecName: "kube-api-access-v78mv") pod "7bc9777b-b253-4482-9703-88a641a076f6" (UID: "7bc9777b-b253-4482-9703-88a641a076f6"). InnerVolumeSpecName "kube-api-access-v78mv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.008684 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bc9777b-b253-4482-9703-88a641a076f6" (UID: "7bc9777b-b253-4482-9703-88a641a076f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.013165 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "7bc9777b-b253-4482-9703-88a641a076f6" (UID: "7bc9777b-b253-4482-9703-88a641a076f6"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.031651 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-config-data" (OuterVolumeSpecName: "config-data") pod "7bc9777b-b253-4482-9703-88a641a076f6" (UID: "7bc9777b-b253-4482-9703-88a641a076f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.064625 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc9777b-b253-4482-9703-88a641a076f6-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.064847 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.064935 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.065043 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc9777b-b253-4482-9703-88a641a076f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.065120 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v78mv\" (UniqueName: \"kubernetes.io/projected/7bc9777b-b253-4482-9703-88a641a076f6-kube-api-access-v78mv\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.285475 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2601574a-36c2-45b2-9369-b3804694a6e5" path="/var/lib/kubelet/pods/2601574a-36c2-45b2-9369-b3804694a6e5/volumes" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.495824 4813 generic.go:334] "Generic (PLEG): container finished" podID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerID="c88d25a2394ac374127a53781101774b4a6b066aac51905ec4c8181bd025cb6c" exitCode=0 Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.495902 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerDied","Data":"c88d25a2394ac374127a53781101774b4a6b066aac51905ec4c8181bd025cb6c"} Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.500096 4813 generic.go:334] "Generic (PLEG): container finished" podID="7bc9777b-b253-4482-9703-88a641a076f6" containerID="fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b" exitCode=0 Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.500190 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7bc9777b-b253-4482-9703-88a641a076f6","Type":"ContainerDied","Data":"fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b"} Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.500215 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7bc9777b-b253-4482-9703-88a641a076f6","Type":"ContainerDied","Data":"43e4ee3cd9e6a35810ef1a8ef084a4582c7fff7ab5509554ba7472d6434425c3"} Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.500233 4813 scope.go:117] "RemoveContainer" containerID="fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.500232 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.505964 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerStarted","Data":"edb84e850a811c15e23c94cedaac15e9fcdde318f3c6a1ba069c60c0da1900ff"} Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.532706 4813 scope.go:117] "RemoveContainer" containerID="7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.532877 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.541243 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.561568 4813 scope.go:117] "RemoveContainer" containerID="fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b" Mar 20 16:02:35 crc kubenswrapper[4813]: E0320 16:02:35.562186 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b\": container with ID starting with fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b not found: ID does not exist" containerID="fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.562236 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b"} err="failed to get container status \"fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b\": rpc error: code = NotFound desc = could not find container \"fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b\": container with ID starting with fa69ba9dff8bfd9291155c0b375f3f435b91f6c5c9c693670cfdd135d4e3783b not found: ID does not exist" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.562270 4813 scope.go:117] "RemoveContainer" containerID="7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5" Mar 20 16:02:35 crc kubenswrapper[4813]: E0320 16:02:35.562698 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5\": container with ID starting with 7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5 not found: ID does not exist" containerID="7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.562724 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5"} err="failed to get container status \"7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5\": rpc error: code = NotFound desc = could not find container \"7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5\": container with ID starting with 7273c3b041e9d799e947492f161c9f43df85683514394370832946ee6e15b0b5 not found: ID does not exist" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.566674 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:35 crc 
kubenswrapper[4813]: E0320 16:02:35.567094 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-kuttl-api-log" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.567117 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-kuttl-api-log" Mar 20 16:02:35 crc kubenswrapper[4813]: E0320 16:02:35.567137 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-api" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.567146 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-api" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.567291 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-api" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.567306 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc9777b-b253-4482-9703-88a641a076f6" containerName="watcher-kuttl-api-log" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.568352 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.571675 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.586795 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.677301 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-822m9\" (UniqueName: \"kubernetes.io/projected/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-kube-api-access-822m9\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.677397 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-logs\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.677514 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.677547 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.677584 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.779014 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-822m9\" (UniqueName: \"kubernetes.io/projected/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-kube-api-access-822m9\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.779334 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-logs\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.779397 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.779421 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.779448 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.779674 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-logs\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.783439 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.784678 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.787040 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " 
pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.802367 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-822m9\" (UniqueName: \"kubernetes.io/projected/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-kube-api-access-822m9\") pod \"watcher-kuttl-api-0\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:35 crc kubenswrapper[4813]: I0320 16:02:35.890752 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:36 crc kubenswrapper[4813]: I0320 16:02:36.416339 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:02:36 crc kubenswrapper[4813]: W0320 16:02:36.427361 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6fc429d_e3c2_40bc_8146_107dbf8dc43e.slice/crio-850441be6332afff7ac7263a5b7d431e93eec1e37abf732e6e9ba16308678613 WatchSource:0}: Error finding container 850441be6332afff7ac7263a5b7d431e93eec1e37abf732e6e9ba16308678613: Status 404 returned error can't find the container with id 850441be6332afff7ac7263a5b7d431e93eec1e37abf732e6e9ba16308678613 Mar 20 16:02:36 crc kubenswrapper[4813]: I0320 16:02:36.525420 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"c6fc429d-e3c2-40bc-8146-107dbf8dc43e","Type":"ContainerStarted","Data":"850441be6332afff7ac7263a5b7d431e93eec1e37abf732e6e9ba16308678613"} Mar 20 16:02:36 crc kubenswrapper[4813]: I0320 16:02:36.531615 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerStarted","Data":"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.275985 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bc9777b-b253-4482-9703-88a641a076f6" path="/var/lib/kubelet/pods/7bc9777b-b253-4482-9703-88a641a076f6/volumes" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.405761 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.513212 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-config-data\") pod \"69eef677-38c1-48f8-8f7c-829780d8071e\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.513331 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-combined-ca-bundle\") pod \"69eef677-38c1-48f8-8f7c-829780d8071e\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.513525 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69eef677-38c1-48f8-8f7c-829780d8071e-logs\") pod \"69eef677-38c1-48f8-8f7c-829780d8071e\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.513576 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-custom-prometheus-ca\") pod \"69eef677-38c1-48f8-8f7c-829780d8071e\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.513605 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjsjr\" (UniqueName: \"kubernetes.io/projected/69eef677-38c1-48f8-8f7c-829780d8071e-kube-api-access-wjsjr\") pod \"69eef677-38c1-48f8-8f7c-829780d8071e\" (UID: \"69eef677-38c1-48f8-8f7c-829780d8071e\") " Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.513947 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69eef677-38c1-48f8-8f7c-829780d8071e-logs" (OuterVolumeSpecName: "logs") pod "69eef677-38c1-48f8-8f7c-829780d8071e" (UID: "69eef677-38c1-48f8-8f7c-829780d8071e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.516784 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69eef677-38c1-48f8-8f7c-829780d8071e-kube-api-access-wjsjr" (OuterVolumeSpecName: "kube-api-access-wjsjr") pod "69eef677-38c1-48f8-8f7c-829780d8071e" (UID: "69eef677-38c1-48f8-8f7c-829780d8071e"). InnerVolumeSpecName "kube-api-access-wjsjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.538094 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "69eef677-38c1-48f8-8f7c-829780d8071e" (UID: "69eef677-38c1-48f8-8f7c-829780d8071e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.538170 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69eef677-38c1-48f8-8f7c-829780d8071e" (UID: "69eef677-38c1-48f8-8f7c-829780d8071e"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.545291 4813 generic.go:334] "Generic (PLEG): container finished" podID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerID="0434699ba4f3b17ce706bb8820b0732b653744fbd62c0ab3057ed3f11aa434e8" exitCode=0 Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.545350 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerDied","Data":"0434699ba4f3b17ce706bb8820b0732b653744fbd62c0ab3057ed3f11aa434e8"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.550926 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerStarted","Data":"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.552656 4813 generic.go:334] "Generic (PLEG): container finished" podID="69eef677-38c1-48f8-8f7c-829780d8071e" containerID="8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80" exitCode=0 Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.552711 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"69eef677-38c1-48f8-8f7c-829780d8071e","Type":"ContainerDied","Data":"8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.552735 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"69eef677-38c1-48f8-8f7c-829780d8071e","Type":"ContainerDied","Data":"54ca52880f34dba944993af8a4c18e89e98c9aef2db144d131079449ce9d14d9"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.552754 4813 scope.go:117] "RemoveContainer" containerID="8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.552960 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.555862 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"c6fc429d-e3c2-40bc-8146-107dbf8dc43e","Type":"ContainerStarted","Data":"8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.555901 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"c6fc429d-e3c2-40bc-8146-107dbf8dc43e","Type":"ContainerStarted","Data":"6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b"} Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.557128 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.598604 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-config-data" (OuterVolumeSpecName: "config-data") pod "69eef677-38c1-48f8-8f7c-829780d8071e" (UID: "69eef677-38c1-48f8-8f7c-829780d8071e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.599961 4813 scope.go:117] "RemoveContainer" containerID="8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80" Mar 20 16:02:37 crc kubenswrapper[4813]: E0320 16:02:37.602059 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80\": container with ID starting with 8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80 not found: ID does not exist" containerID="8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.602102 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80"} err="failed to get container status \"8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80\": rpc error: code = NotFound desc = could not find container \"8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80\": container with ID starting with 8ffec7f5a5b804623cf3eec05c7df03b3abec57f920503a35c825136a8f88a80 not found: ID does not exist" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.621274 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.621511 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjsjr\" (UniqueName: \"kubernetes.io/projected/69eef677-38c1-48f8-8f7c-829780d8071e-kube-api-access-wjsjr\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.621598 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.621673 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69eef677-38c1-48f8-8f7c-829780d8071e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.621766 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69eef677-38c1-48f8-8f7c-829780d8071e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.623255 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.623244257 podStartE2EDuration="2.623244257s" podCreationTimestamp="2026-03-20 16:02:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:02:37.621184612 +0000 UTC m=+1487.043887473" watchObservedRunningTime="2026-03-20 16:02:37.623244257 +0000 UTC m=+1487.045947098" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.891527 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.900935 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.924891 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:37 crc kubenswrapper[4813]: E0320 16:02:37.925250 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69eef677-38c1-48f8-8f7c-829780d8071e" containerName="watcher-decision-engine" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.925272 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="69eef677-38c1-48f8-8f7c-829780d8071e" containerName="watcher-decision-engine" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.925436 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="69eef677-38c1-48f8-8f7c-829780d8071e" containerName="watcher-decision-engine" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.925967 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.930597 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:02:37 crc kubenswrapper[4813]: I0320 16:02:37.946843 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.026916 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.027039 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.027070 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn4m2\" (UniqueName: \"kubernetes.io/projected/a6bcc686-f006-49f4-9451-8184fb2cce32-kube-api-access-tn4m2\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.027089 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.027104 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6bcc686-f006-49f4-9451-8184fb2cce32-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " 
pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.128858 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.129228 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn4m2\" (UniqueName: \"kubernetes.io/projected/a6bcc686-f006-49f4-9451-8184fb2cce32-kube-api-access-tn4m2\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.129343 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.129453 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6bcc686-f006-49f4-9451-8184fb2cce32-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.129896 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6bcc686-f006-49f4-9451-8184fb2cce32-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.130118 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.135915 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.135944 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.136202 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-config-data\") pod 
\"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.151126 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn4m2\" (UniqueName: \"kubernetes.io/projected/a6bcc686-f006-49f4-9451-8184fb2cce32-kube-api-access-tn4m2\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.240358 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.567172 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerStarted","Data":"81a8a388794276d4027ed025db720145ccce111c5e03f204dc63cc1de9aa8815"} Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.570673 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerStarted","Data":"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a"} Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.591150 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b7z4s" podStartSLOduration=2.931009838 podStartE2EDuration="5.59113132s" podCreationTimestamp="2026-03-20 16:02:33 +0000 UTC" firstStartedPulling="2026-03-20 16:02:35.49715084 +0000 UTC m=+1484.919853681" lastFinishedPulling="2026-03-20 16:02:38.157272312 +0000 UTC m=+1487.579975163" observedRunningTime="2026-03-20 16:02:38.588671693 +0000 UTC m=+1488.011374534" watchObservedRunningTime="2026-03-20 16:02:38.59113132 +0000 UTC m=+1488.013834161" Mar 20 16:02:38 crc kubenswrapper[4813]: I0320 16:02:38.690332 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.276761 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69eef677-38c1-48f8-8f7c-829780d8071e" path="/var/lib/kubelet/pods/69eef677-38c1-48f8-8f7c-829780d8071e/volumes" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.417083 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.559996 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bbjh\" (UniqueName: \"kubernetes.io/projected/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-kube-api-access-6bbjh\") pod \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.560380 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-combined-ca-bundle\") pod \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.560833 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-config-data\") pod \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.561088 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-logs\") pod \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\" (UID: \"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5\") " Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.561694 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-logs" (OuterVolumeSpecName: "logs") pod "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" (UID: "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.563781 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-kube-api-access-6bbjh" (OuterVolumeSpecName: "kube-api-access-6bbjh") pod "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" (UID: "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5"). InnerVolumeSpecName "kube-api-access-6bbjh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.588531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"a6bcc686-f006-49f4-9451-8184fb2cce32","Type":"ContainerStarted","Data":"99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05"} Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.588579 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"a6bcc686-f006-49f4-9451-8184fb2cce32","Type":"ContainerStarted","Data":"c8bf4fa344c0f66589051dd309f1429d51de3dca524131661601bcb098a0bc99"} Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.593047 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerStarted","Data":"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d"} Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.593842 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.594955 4813 generic.go:334] "Generic (PLEG): container finished" podID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" exitCode=0 Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.595447 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.595699 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5","Type":"ContainerDied","Data":"84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763"} Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.595760 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"90ecc84c-c65b-463f-8c1f-e0fa9d950dc5","Type":"ContainerDied","Data":"c8782a8e091a4b570b9a9560a7b13e08737e5b06d0607f9a21a097dfc8340e84"} Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.595790 4813 scope.go:117] "RemoveContainer" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.604988 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.604970709 podStartE2EDuration="2.604970709s" podCreationTimestamp="2026-03-20 16:02:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:02:39.604799114 +0000 UTC m=+1489.027501955" watchObservedRunningTime="2026-03-20 16:02:39.604970709 +0000 UTC m=+1489.027673550" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.620938 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" (UID: "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.626725 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-config-data" (OuterVolumeSpecName: "config-data") pod "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" (UID: "90ecc84c-c65b-463f-8c1f-e0fa9d950dc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.626869 4813 scope.go:117] "RemoveContainer" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" Mar 20 16:02:39 crc kubenswrapper[4813]: E0320 16:02:39.627275 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763\": container with ID starting with 84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763 not found: ID does not exist" containerID="84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.627327 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763"} err="failed to get container status \"84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763\": rpc error: code = NotFound desc = could not find container \"84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763\": container with ID starting with 84c0b2f4801f755eb6b1fe229d1c1cabab1e5559d3d037c5db12ddf9da819763 not found: ID does not exist" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.662982 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.663035 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.663056 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bbjh\" (UniqueName: \"kubernetes.io/projected/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-kube-api-access-6bbjh\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.663074 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.928252 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.425707513 podStartE2EDuration="6.928234679s" podCreationTimestamp="2026-03-20 16:02:33 +0000 UTC" firstStartedPulling="2026-03-20 16:02:34.721194126 +0000 UTC m=+1484.143896967" lastFinishedPulling="2026-03-20 16:02:39.223721292 +0000 UTC m=+1488.646424133" observedRunningTime="2026-03-20 16:02:39.627118581 +0000 UTC m=+1489.049821422" watchObservedRunningTime="2026-03-20 16:02:39.928234679 +0000 UTC m=+1489.350937520" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.934124 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.942336 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.962128 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:39 crc kubenswrapper[4813]: E0320 16:02:39.962475 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" containerName="watcher-applier" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.962503 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" containerName="watcher-applier" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.962648 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" containerName="watcher-applier" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.963163 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.969498 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:02:39 crc kubenswrapper[4813]: I0320 16:02:39.973147 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.068610 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.068719 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.068845 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5081b10c-49a8-415e-bffa-ed620820005f-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.068922 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r857r\" (UniqueName: \"kubernetes.io/projected/5081b10c-49a8-415e-bffa-ed620820005f-kube-api-access-r857r\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.170932 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5081b10c-49a8-415e-bffa-ed620820005f-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 
16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.171013 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r857r\" (UniqueName: \"kubernetes.io/projected/5081b10c-49a8-415e-bffa-ed620820005f-kube-api-access-r857r\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.171105 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.171148 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.172028 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5081b10c-49a8-415e-bffa-ed620820005f-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.174998 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.175470 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.195205 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r857r\" (UniqueName: \"kubernetes.io/projected/5081b10c-49a8-415e-bffa-ed620820005f-kube-api-access-r857r\") pod \"watcher-kuttl-applier-0\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.277668 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.291967 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.731820 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:02:40 crc kubenswrapper[4813]: I0320 16:02:40.891422 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:41 crc kubenswrapper[4813]: I0320 16:02:41.283408 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90ecc84c-c65b-463f-8c1f-e0fa9d950dc5" path="/var/lib/kubelet/pods/90ecc84c-c65b-463f-8c1f-e0fa9d950dc5/volumes" Mar 20 16:02:41 crc kubenswrapper[4813]: I0320 16:02:41.618293 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5081b10c-49a8-415e-bffa-ed620820005f","Type":"ContainerStarted","Data":"533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1"} Mar 20 16:02:41 crc kubenswrapper[4813]: I0320 16:02:41.618767 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5081b10c-49a8-415e-bffa-ed620820005f","Type":"ContainerStarted","Data":"0192af7b2ec3a3468cb3ada4ace12e901d67cfda4086626b7364fbde130b38b2"} Mar 20 16:02:41 crc kubenswrapper[4813]: I0320 16:02:41.645218 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=2.645197176 podStartE2EDuration="2.645197176s" podCreationTimestamp="2026-03-20 16:02:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:02:41.640934521 +0000 UTC m=+1491.063637372" watchObservedRunningTime="2026-03-20 16:02:41.645197176 +0000 UTC m=+1491.067900027" Mar 20 16:02:43 crc kubenswrapper[4813]: I0320 16:02:43.732346 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:43 crc kubenswrapper[4813]: I0320 16:02:43.732678 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:43 crc kubenswrapper[4813]: I0320 16:02:43.786961 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:44 crc kubenswrapper[4813]: I0320 16:02:44.680181 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:45 crc kubenswrapper[4813]: I0320 16:02:45.278106 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:45 crc kubenswrapper[4813]: I0320 16:02:45.891204 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:45 crc kubenswrapper[4813]: I0320 16:02:45.900077 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:46 crc kubenswrapper[4813]: I0320 16:02:46.659810 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.328653 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7z4s"] Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.328978 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b7z4s" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="registry-server" containerID="cri-o://81a8a388794276d4027ed025db720145ccce111c5e03f204dc63cc1de9aa8815" gracePeriod=2 Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.670350 4813 generic.go:334] "Generic (PLEG): container finished" podID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerID="81a8a388794276d4027ed025db720145ccce111c5e03f204dc63cc1de9aa8815" exitCode=0 Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.670435 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerDied","Data":"81a8a388794276d4027ed025db720145ccce111c5e03f204dc63cc1de9aa8815"} Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.786544 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.904657 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zscrl\" (UniqueName: \"kubernetes.io/projected/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-kube-api-access-zscrl\") pod \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.904793 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-catalog-content\") pod \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.904904 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-utilities\") pod \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\" (UID: \"9c6c4396-5e8f-4396-9493-c1ddad2c87a5\") " Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.905543 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-utilities" (OuterVolumeSpecName: "utilities") pod "9c6c4396-5e8f-4396-9493-c1ddad2c87a5" (UID: "9c6c4396-5e8f-4396-9493-c1ddad2c87a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.910843 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-kube-api-access-zscrl" (OuterVolumeSpecName: "kube-api-access-zscrl") pod "9c6c4396-5e8f-4396-9493-c1ddad2c87a5" (UID: "9c6c4396-5e8f-4396-9493-c1ddad2c87a5"). InnerVolumeSpecName "kube-api-access-zscrl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:02:47 crc kubenswrapper[4813]: I0320 16:02:47.944606 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c6c4396-5e8f-4396-9493-c1ddad2c87a5" (UID: "9c6c4396-5e8f-4396-9493-c1ddad2c87a5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.006900 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zscrl\" (UniqueName: \"kubernetes.io/projected/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-kube-api-access-zscrl\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.006941 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.006953 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6c4396-5e8f-4396-9493-c1ddad2c87a5-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.241067 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.281914 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.683519 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b7z4s" event={"ID":"9c6c4396-5e8f-4396-9493-c1ddad2c87a5","Type":"ContainerDied","Data":"545585ea69e64e3e221f95a0e52f55f8e41cae8d516572db830f09b126e0d350"} Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.683907 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.683935 4813 scope.go:117] "RemoveContainer" containerID="81a8a388794276d4027ed025db720145ccce111c5e03f204dc63cc1de9aa8815" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.683537 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b7z4s" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.711083 4813 scope.go:117] "RemoveContainer" containerID="0434699ba4f3b17ce706bb8820b0732b653744fbd62c0ab3057ed3f11aa434e8" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.714996 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7z4s"] Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.715169 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.724057 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b7z4s"] Mar 20 16:02:48 crc kubenswrapper[4813]: I0320 16:02:48.740051 4813 scope.go:117] "RemoveContainer" containerID="c88d25a2394ac374127a53781101774b4a6b066aac51905ec4c8181bd025cb6c" Mar 20 16:02:49 crc kubenswrapper[4813]: I0320 16:02:49.275983 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" path="/var/lib/kubelet/pods/9c6c4396-5e8f-4396-9493-c1ddad2c87a5/volumes" Mar 20 16:02:50 crc kubenswrapper[4813]: I0320 16:02:50.278525 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:50 crc kubenswrapper[4813]: I0320 16:02:50.302556 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:02:50 crc kubenswrapper[4813]: I0320 16:02:50.738026 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:03:03 crc kubenswrapper[4813]: I0320 16:03:03.843030 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:03:03 crc kubenswrapper[4813]: I0320 16:03:03.843670 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:03:04 crc kubenswrapper[4813]: I0320 16:03:04.233407 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:03:33 crc kubenswrapper[4813]: I0320 16:03:33.842886 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:03:33 crc kubenswrapper[4813]: I0320 16:03:33.843403 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.144533 4813 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567044-x6drx"] Mar 20 16:04:00 crc kubenswrapper[4813]: E0320 16:04:00.146037 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="registry-server" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.146056 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="registry-server" Mar 20 16:04:00 crc kubenswrapper[4813]: E0320 16:04:00.146090 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="extract-utilities" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.146098 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="extract-utilities" Mar 20 16:04:00 crc kubenswrapper[4813]: E0320 16:04:00.146113 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="extract-content" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.146120 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="extract-content" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.146290 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c6c4396-5e8f-4396-9493-c1ddad2c87a5" containerName="registry-server" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.147229 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.152761 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.153749 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.154001 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.153900 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567044-x6drx"] Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.249414 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z5xn\" (UniqueName: \"kubernetes.io/projected/7679b19d-34cb-404b-9e11-39e1e6a6cf60-kube-api-access-9z5xn\") pod \"auto-csr-approver-29567044-x6drx\" (UID: \"7679b19d-34cb-404b-9e11-39e1e6a6cf60\") " pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.351727 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z5xn\" (UniqueName: \"kubernetes.io/projected/7679b19d-34cb-404b-9e11-39e1e6a6cf60-kube-api-access-9z5xn\") pod \"auto-csr-approver-29567044-x6drx\" (UID: \"7679b19d-34cb-404b-9e11-39e1e6a6cf60\") " pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.375734 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z5xn\" (UniqueName: \"kubernetes.io/projected/7679b19d-34cb-404b-9e11-39e1e6a6cf60-kube-api-access-9z5xn\") pod \"auto-csr-approver-29567044-x6drx\" (UID: 
\"7679b19d-34cb-404b-9e11-39e1e6a6cf60\") " pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.480856 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:00 crc kubenswrapper[4813]: W0320 16:04:00.985262 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7679b19d_34cb_404b_9e11_39e1e6a6cf60.slice/crio-4d72d27c6cf36f8d1a683ec84eaf04b31e8d8575ff4ecd14a557e8e0dedda1cd WatchSource:0}: Error finding container 4d72d27c6cf36f8d1a683ec84eaf04b31e8d8575ff4ecd14a557e8e0dedda1cd: Status 404 returned error can't find the container with id 4d72d27c6cf36f8d1a683ec84eaf04b31e8d8575ff4ecd14a557e8e0dedda1cd Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.986167 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567044-x6drx"] Mar 20 16:04:00 crc kubenswrapper[4813]: I0320 16:04:00.988301 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 16:04:01 crc kubenswrapper[4813]: I0320 16:04:01.328017 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567044-x6drx" event={"ID":"7679b19d-34cb-404b-9e11-39e1e6a6cf60","Type":"ContainerStarted","Data":"4d72d27c6cf36f8d1a683ec84eaf04b31e8d8575ff4ecd14a557e8e0dedda1cd"} Mar 20 16:04:02 crc kubenswrapper[4813]: I0320 16:04:02.337313 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567044-x6drx" event={"ID":"7679b19d-34cb-404b-9e11-39e1e6a6cf60","Type":"ContainerStarted","Data":"83bfe169cc29938503d083ac1edfd46931bff4e2b1ac10da0a676fc2b16fae82"} Mar 20 16:04:02 crc kubenswrapper[4813]: I0320 16:04:02.359211 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567044-x6drx" podStartSLOduration=1.357553972 podStartE2EDuration="2.359192959s" podCreationTimestamp="2026-03-20 16:04:00 +0000 UTC" firstStartedPulling="2026-03-20 16:04:00.988031524 +0000 UTC m=+1570.410734365" lastFinishedPulling="2026-03-20 16:04:01.989670501 +0000 UTC m=+1571.412373352" observedRunningTime="2026-03-20 16:04:02.350198134 +0000 UTC m=+1571.772900975" watchObservedRunningTime="2026-03-20 16:04:02.359192959 +0000 UTC m=+1571.781895800" Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.346731 4813 generic.go:334] "Generic (PLEG): container finished" podID="7679b19d-34cb-404b-9e11-39e1e6a6cf60" containerID="83bfe169cc29938503d083ac1edfd46931bff4e2b1ac10da0a676fc2b16fae82" exitCode=0 Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.347194 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567044-x6drx" event={"ID":"7679b19d-34cb-404b-9e11-39e1e6a6cf60","Type":"ContainerDied","Data":"83bfe169cc29938503d083ac1edfd46931bff4e2b1ac10da0a676fc2b16fae82"} Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.842743 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.843000 4813 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.843146 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.843808 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"238a56263adb7631a4a6e1bdb38d22562989800e25b84edc88b747390df1d3e7"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:04:03 crc kubenswrapper[4813]: I0320 16:04:03.843930 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://238a56263adb7631a4a6e1bdb38d22562989800e25b84edc88b747390df1d3e7" gracePeriod=600 Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.358639 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="238a56263adb7631a4a6e1bdb38d22562989800e25b84edc88b747390df1d3e7" exitCode=0 Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.358731 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"238a56263adb7631a4a6e1bdb38d22562989800e25b84edc88b747390df1d3e7"} Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.358949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4"} Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.358972 4813 scope.go:117] "RemoveContainer" containerID="957002a8822874ec45a5cbe2ca3717cafc8492693f474f6fa7fd364b2cfa8d50" Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.695067 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.729987 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z5xn\" (UniqueName: \"kubernetes.io/projected/7679b19d-34cb-404b-9e11-39e1e6a6cf60-kube-api-access-9z5xn\") pod \"7679b19d-34cb-404b-9e11-39e1e6a6cf60\" (UID: \"7679b19d-34cb-404b-9e11-39e1e6a6cf60\") " Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.736826 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7679b19d-34cb-404b-9e11-39e1e6a6cf60-kube-api-access-9z5xn" (OuterVolumeSpecName: "kube-api-access-9z5xn") pod "7679b19d-34cb-404b-9e11-39e1e6a6cf60" (UID: "7679b19d-34cb-404b-9e11-39e1e6a6cf60"). InnerVolumeSpecName "kube-api-access-9z5xn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:04:04 crc kubenswrapper[4813]: I0320 16:04:04.832504 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z5xn\" (UniqueName: \"kubernetes.io/projected/7679b19d-34cb-404b-9e11-39e1e6a6cf60-kube-api-access-9z5xn\") on node \"crc\" DevicePath \"\"" Mar 20 16:04:05 crc kubenswrapper[4813]: I0320 16:04:05.370421 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567044-x6drx" event={"ID":"7679b19d-34cb-404b-9e11-39e1e6a6cf60","Type":"ContainerDied","Data":"4d72d27c6cf36f8d1a683ec84eaf04b31e8d8575ff4ecd14a557e8e0dedda1cd"} Mar 20 16:04:05 crc kubenswrapper[4813]: I0320 16:04:05.370458 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d72d27c6cf36f8d1a683ec84eaf04b31e8d8575ff4ecd14a557e8e0dedda1cd" Mar 20 16:04:05 crc kubenswrapper[4813]: I0320 16:04:05.370454 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567044-x6drx" Mar 20 16:04:05 crc kubenswrapper[4813]: I0320 16:04:05.428504 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567038-9tndc"] Mar 20 16:04:05 crc kubenswrapper[4813]: I0320 16:04:05.437151 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567038-9tndc"] Mar 20 16:04:07 crc kubenswrapper[4813]: I0320 16:04:07.275090 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dffeaf94-f594-4a3e-8672-fbcf49398f05" path="/var/lib/kubelet/pods/dffeaf94-f594-4a3e-8672-fbcf49398f05/volumes" Mar 20 16:04:16 crc kubenswrapper[4813]: I0320 16:04:16.237867 4813 scope.go:117] "RemoveContainer" containerID="6186cce06bed4cf3ee6e4df1bd3da424b20239dd1ddf2135eadbf98430890c0e" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.737367 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-frrmd"] Mar 20 16:05:50 crc kubenswrapper[4813]: E0320 16:05:50.738040 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7679b19d-34cb-404b-9e11-39e1e6a6cf60" containerName="oc" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.738051 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7679b19d-34cb-404b-9e11-39e1e6a6cf60" containerName="oc" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.738213 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7679b19d-34cb-404b-9e11-39e1e6a6cf60" containerName="oc" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.739333 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.766809 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-frrmd"] Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.860609 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-utilities\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.860748 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmhlj\" (UniqueName: \"kubernetes.io/projected/c0f6ab43-6a9d-4776-a105-f578ab82c359-kube-api-access-jmhlj\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.861296 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-catalog-content\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.978460 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-catalog-content\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.978572 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-utilities\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.978856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmhlj\" (UniqueName: \"kubernetes.io/projected/c0f6ab43-6a9d-4776-a105-f578ab82c359-kube-api-access-jmhlj\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.979857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-catalog-content\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:50 crc kubenswrapper[4813]: I0320 16:05:50.980082 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-utilities\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:51 crc kubenswrapper[4813]: I0320 16:05:51.000579 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jmhlj\" (UniqueName: \"kubernetes.io/projected/c0f6ab43-6a9d-4776-a105-f578ab82c359-kube-api-access-jmhlj\") pod \"community-operators-frrmd\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:51 crc kubenswrapper[4813]: I0320 16:05:51.063007 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:05:51 crc kubenswrapper[4813]: I0320 16:05:51.561893 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-frrmd"] Mar 20 16:05:51 crc kubenswrapper[4813]: W0320 16:05:51.574219 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0f6ab43_6a9d_4776_a105_f578ab82c359.slice/crio-0cbad02af44d144d592d5f79bdd2ccae2651b35a98505ababb2dc0794cb54629 WatchSource:0}: Error finding container 0cbad02af44d144d592d5f79bdd2ccae2651b35a98505ababb2dc0794cb54629: Status 404 returned error can't find the container with id 0cbad02af44d144d592d5f79bdd2ccae2651b35a98505ababb2dc0794cb54629 Mar 20 16:05:52 crc kubenswrapper[4813]: I0320 16:05:52.275432 4813 generic.go:334] "Generic (PLEG): container finished" podID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerID="b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f" exitCode=0 Mar 20 16:05:52 crc kubenswrapper[4813]: I0320 16:05:52.275516 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerDied","Data":"b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f"} Mar 20 16:05:52 crc kubenswrapper[4813]: I0320 16:05:52.275553 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerStarted","Data":"0cbad02af44d144d592d5f79bdd2ccae2651b35a98505ababb2dc0794cb54629"} Mar 20 16:05:53 crc kubenswrapper[4813]: I0320 16:05:53.296096 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerStarted","Data":"a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389"} Mar 20 16:05:54 crc kubenswrapper[4813]: I0320 16:05:54.305809 4813 generic.go:334] "Generic (PLEG): container finished" podID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerID="a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389" exitCode=0 Mar 20 16:05:54 crc kubenswrapper[4813]: I0320 16:05:54.306107 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerDied","Data":"a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389"} Mar 20 16:05:55 crc kubenswrapper[4813]: I0320 16:05:55.316900 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerStarted","Data":"4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381"} Mar 20 16:05:55 crc kubenswrapper[4813]: I0320 16:05:55.338096 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-frrmd" 
podStartSLOduration=2.755712493 podStartE2EDuration="5.3380791s" podCreationTimestamp="2026-03-20 16:05:50 +0000 UTC" firstStartedPulling="2026-03-20 16:05:52.276981726 +0000 UTC m=+1681.699684557" lastFinishedPulling="2026-03-20 16:05:54.859348313 +0000 UTC m=+1684.282051164" observedRunningTime="2026-03-20 16:05:55.332558649 +0000 UTC m=+1684.755261500" watchObservedRunningTime="2026-03-20 16:05:55.3380791 +0000 UTC m=+1684.760781951" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.139127 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567046-rcvzn"] Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.141085 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.146787 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567046-rcvzn"] Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.180968 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.181004 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.181302 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.235676 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rvb6\" (UniqueName: \"kubernetes.io/projected/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6-kube-api-access-4rvb6\") pod \"auto-csr-approver-29567046-rcvzn\" (UID: \"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6\") " pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.337320 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rvb6\" (UniqueName: \"kubernetes.io/projected/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6-kube-api-access-4rvb6\") pod \"auto-csr-approver-29567046-rcvzn\" (UID: \"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6\") " pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.357786 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rvb6\" (UniqueName: \"kubernetes.io/projected/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6-kube-api-access-4rvb6\") pod \"auto-csr-approver-29567046-rcvzn\" (UID: \"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6\") " pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:00 crc kubenswrapper[4813]: I0320 16:06:00.495508 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:01 crc kubenswrapper[4813]: I0320 16:06:01.043146 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567046-rcvzn"] Mar 20 16:06:01 crc kubenswrapper[4813]: I0320 16:06:01.063752 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:06:01 crc kubenswrapper[4813]: I0320 16:06:01.063804 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:06:01 crc kubenswrapper[4813]: I0320 16:06:01.108370 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:06:01 crc kubenswrapper[4813]: I0320 16:06:01.367261 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" event={"ID":"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6","Type":"ContainerStarted","Data":"fe2e858ddb99e5810cbe92fe5c1d504576983e55eaceb678182e2afd0ea84bc2"} Mar 20 16:06:01 crc kubenswrapper[4813]: I0320 16:06:01.420411 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:06:02 crc kubenswrapper[4813]: I0320 16:06:02.375639 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" event={"ID":"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6","Type":"ContainerStarted","Data":"49ece6f557be574e976dea645212be60df338fe5cf2d47e9c88a02b3713b6a95"} Mar 20 16:06:02 crc kubenswrapper[4813]: I0320 16:06:02.396858 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" podStartSLOduration=1.522111744 podStartE2EDuration="2.396843642s" podCreationTimestamp="2026-03-20 16:06:00 +0000 UTC" firstStartedPulling="2026-03-20 16:06:01.05084519 +0000 UTC m=+1690.473548061" lastFinishedPulling="2026-03-20 16:06:01.925577108 +0000 UTC m=+1691.348279959" observedRunningTime="2026-03-20 16:06:02.39346402 +0000 UTC m=+1691.816166861" watchObservedRunningTime="2026-03-20 16:06:02.396843642 +0000 UTC m=+1691.819546483" Mar 20 16:06:03 crc kubenswrapper[4813]: I0320 16:06:03.385333 4813 generic.go:334] "Generic (PLEG): container finished" podID="6c9c37d6-3b34-48c4-ab05-eea8a0981fc6" containerID="49ece6f557be574e976dea645212be60df338fe5cf2d47e9c88a02b3713b6a95" exitCode=0 Mar 20 16:06:03 crc kubenswrapper[4813]: I0320 16:06:03.385639 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" event={"ID":"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6","Type":"ContainerDied","Data":"49ece6f557be574e976dea645212be60df338fe5cf2d47e9c88a02b3713b6a95"} Mar 20 16:06:04 crc kubenswrapper[4813]: I0320 16:06:04.687513 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:04 crc kubenswrapper[4813]: I0320 16:06:04.729275 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-frrmd"] Mar 20 16:06:04 crc kubenswrapper[4813]: I0320 16:06:04.729532 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-frrmd" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="registry-server" containerID="cri-o://4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381" gracePeriod=2 Mar 20 16:06:04 crc kubenswrapper[4813]: I0320 16:06:04.816048 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rvb6\" (UniqueName: \"kubernetes.io/projected/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6-kube-api-access-4rvb6\") pod \"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6\" (UID: \"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6\") " Mar 20 16:06:04 crc kubenswrapper[4813]: I0320 16:06:04.820985 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6-kube-api-access-4rvb6" (OuterVolumeSpecName: "kube-api-access-4rvb6") pod "6c9c37d6-3b34-48c4-ab05-eea8a0981fc6" (UID: "6c9c37d6-3b34-48c4-ab05-eea8a0981fc6"). InnerVolumeSpecName "kube-api-access-4rvb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:06:04 crc kubenswrapper[4813]: I0320 16:06:04.918069 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rvb6\" (UniqueName: \"kubernetes.io/projected/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6-kube-api-access-4rvb6\") on node \"crc\" DevicePath \"\"" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.132438 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.324519 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-catalog-content\") pod \"c0f6ab43-6a9d-4776-a105-f578ab82c359\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.324824 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmhlj\" (UniqueName: \"kubernetes.io/projected/c0f6ab43-6a9d-4776-a105-f578ab82c359-kube-api-access-jmhlj\") pod \"c0f6ab43-6a9d-4776-a105-f578ab82c359\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.324924 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-utilities\") pod \"c0f6ab43-6a9d-4776-a105-f578ab82c359\" (UID: \"c0f6ab43-6a9d-4776-a105-f578ab82c359\") " Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.326344 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-utilities" (OuterVolumeSpecName: "utilities") pod "c0f6ab43-6a9d-4776-a105-f578ab82c359" (UID: "c0f6ab43-6a9d-4776-a105-f578ab82c359"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.329334 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0f6ab43-6a9d-4776-a105-f578ab82c359-kube-api-access-jmhlj" (OuterVolumeSpecName: "kube-api-access-jmhlj") pod "c0f6ab43-6a9d-4776-a105-f578ab82c359" (UID: "c0f6ab43-6a9d-4776-a105-f578ab82c359"). InnerVolumeSpecName "kube-api-access-jmhlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.397814 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0f6ab43-6a9d-4776-a105-f578ab82c359" (UID: "c0f6ab43-6a9d-4776-a105-f578ab82c359"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.405283 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.405277 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567046-rcvzn" event={"ID":"6c9c37d6-3b34-48c4-ab05-eea8a0981fc6","Type":"ContainerDied","Data":"fe2e858ddb99e5810cbe92fe5c1d504576983e55eaceb678182e2afd0ea84bc2"} Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.405513 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe2e858ddb99e5810cbe92fe5c1d504576983e55eaceb678182e2afd0ea84bc2" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.411004 4813 generic.go:334] "Generic (PLEG): container finished" podID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerID="4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381" exitCode=0 Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.411042 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-frrmd" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.411060 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerDied","Data":"4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381"} Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.411106 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frrmd" event={"ID":"c0f6ab43-6a9d-4776-a105-f578ab82c359","Type":"ContainerDied","Data":"0cbad02af44d144d592d5f79bdd2ccae2651b35a98505ababb2dc0794cb54629"} Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.411137 4813 scope.go:117] "RemoveContainer" containerID="4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.427613 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.427643 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmhlj\" (UniqueName: \"kubernetes.io/projected/c0f6ab43-6a9d-4776-a105-f578ab82c359-kube-api-access-jmhlj\") on node \"crc\" DevicePath \"\"" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.427653 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6ab43-6a9d-4776-a105-f578ab82c359-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.443757 4813 scope.go:117] "RemoveContainer" containerID="a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.463041 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-frrmd"] Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.469791 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567040-5rzq4"] Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.477046 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-frrmd"] Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.479249 4813 scope.go:117] "RemoveContainer" containerID="b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.484409 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567040-5rzq4"] Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.497613 4813 scope.go:117] "RemoveContainer" containerID="4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381" Mar 20 16:06:05 crc kubenswrapper[4813]: E0320 16:06:05.497945 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381\": container with ID starting with 4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381 not found: ID does not exist" containerID="4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.497983 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381"} err="failed to get container status \"4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381\": rpc error: code = NotFound desc = could not find container \"4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381\": container with ID starting with 4d535ebc9d7f6708d69bd6c234c80c0dd3bdc222649b4b6ae2ca80a90de29381 not found: ID does not exist" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.498009 4813 scope.go:117] "RemoveContainer" containerID="a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389" Mar 20 16:06:05 crc kubenswrapper[4813]: E0320 16:06:05.498188 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389\": container with ID starting with a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389 not found: ID does not exist" containerID="a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.498217 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389"} err="failed to get container status \"a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389\": rpc error: code = NotFound desc = could not find container \"a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389\": container with ID starting with a47254bf89bf8717004950a4a8bf301285d2458acc1e4d471783a0aadf2a8389 not found: ID does not exist" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.498233 4813 scope.go:117] "RemoveContainer" containerID="b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f" Mar 20 16:06:05 crc kubenswrapper[4813]: E0320 16:06:05.498390 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f\": container with ID starting with b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f not found: ID does not exist" containerID="b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f" Mar 20 16:06:05 crc kubenswrapper[4813]: I0320 16:06:05.498415 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f"} err="failed to get container status \"b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f\": rpc error: code = NotFound desc = could not find container \"b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f\": container with ID starting with b0aebcd8c8c351c81fd45c6e936333ade7ed75dd2aca5b7c32088873ad13685f not found: ID does not exist" Mar 20 16:06:07 crc kubenswrapper[4813]: I0320 16:06:07.278588 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" path="/var/lib/kubelet/pods/c0f6ab43-6a9d-4776-a105-f578ab82c359/volumes" Mar 20 16:06:07 crc kubenswrapper[4813]: I0320 16:06:07.279623 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f979cdb9-a7bf-4465-88f6-82f30ac60145" path="/var/lib/kubelet/pods/f979cdb9-a7bf-4465-88f6-82f30ac60145/volumes" Mar 20 16:06:16 crc kubenswrapper[4813]: I0320 16:06:16.446039 4813 scope.go:117] "RemoveContainer" 
containerID="6900f2425bdb89293e31e98cc8fc49b5c6276b49e10ab5c808a3e94da4cd9b6c" Mar 20 16:06:33 crc kubenswrapper[4813]: I0320 16:06:33.843087 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:06:33 crc kubenswrapper[4813]: I0320 16:06:33.843891 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:07:03 crc kubenswrapper[4813]: I0320 16:07:03.842079 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:07:03 crc kubenswrapper[4813]: I0320 16:07:03.842522 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:07:16 crc kubenswrapper[4813]: I0320 16:07:16.540203 4813 scope.go:117] "RemoveContainer" containerID="961067924eefb9dd1fdaf9b2e74736bebac11be188943e28b99c9f11f67e03b7" Mar 20 16:07:33 crc kubenswrapper[4813]: I0320 16:07:33.842898 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:07:33 crc kubenswrapper[4813]: I0320 16:07:33.843385 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:07:33 crc kubenswrapper[4813]: I0320 16:07:33.843429 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:07:33 crc kubenswrapper[4813]: I0320 16:07:33.844066 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:07:33 crc kubenswrapper[4813]: I0320 16:07:33.844112 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" gracePeriod=600 Mar 20 16:07:33 crc 
kubenswrapper[4813]: E0320 16:07:33.987260 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:07:34 crc kubenswrapper[4813]: I0320 16:07:34.225398 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" exitCode=0 Mar 20 16:07:34 crc kubenswrapper[4813]: I0320 16:07:34.225453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4"} Mar 20 16:07:34 crc kubenswrapper[4813]: I0320 16:07:34.225796 4813 scope.go:117] "RemoveContainer" containerID="238a56263adb7631a4a6e1bdb38d22562989800e25b84edc88b747390df1d3e7" Mar 20 16:07:34 crc kubenswrapper[4813]: I0320 16:07:34.226415 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:07:34 crc kubenswrapper[4813]: E0320 16:07:34.226774 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:07:46 crc kubenswrapper[4813]: I0320 16:07:46.266211 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:07:46 crc kubenswrapper[4813]: E0320 16:07:46.266871 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:07:58 crc kubenswrapper[4813]: I0320 16:07:58.266314 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:07:58 crc kubenswrapper[4813]: E0320 16:07:58.267129 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.152204 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567048-qs9lf"] Mar 20 16:08:00 crc kubenswrapper[4813]: E0320 16:08:00.152778 4813 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="6c9c37d6-3b34-48c4-ab05-eea8a0981fc6" containerName="oc" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.152796 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c9c37d6-3b34-48c4-ab05-eea8a0981fc6" containerName="oc" Mar 20 16:08:00 crc kubenswrapper[4813]: E0320 16:08:00.152820 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="registry-server" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.152828 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="registry-server" Mar 20 16:08:00 crc kubenswrapper[4813]: E0320 16:08:00.152845 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="extract-content" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.152855 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="extract-content" Mar 20 16:08:00 crc kubenswrapper[4813]: E0320 16:08:00.152873 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="extract-utilities" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.152883 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="extract-utilities" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.153121 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c9c37d6-3b34-48c4-ab05-eea8a0981fc6" containerName="oc" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.153148 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0f6ab43-6a9d-4776-a105-f578ab82c359" containerName="registry-server" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.153832 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.156016 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.157518 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.159592 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567048-qs9lf"] Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.161955 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.224878 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fthzc\" (UniqueName: \"kubernetes.io/projected/32249127-220e-4205-869a-e7435218b685-kube-api-access-fthzc\") pod \"auto-csr-approver-29567048-qs9lf\" (UID: \"32249127-220e-4205-869a-e7435218b685\") " pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.327318 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fthzc\" (UniqueName: \"kubernetes.io/projected/32249127-220e-4205-869a-e7435218b685-kube-api-access-fthzc\") pod \"auto-csr-approver-29567048-qs9lf\" (UID: \"32249127-220e-4205-869a-e7435218b685\") " pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.348422 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fthzc\" (UniqueName: \"kubernetes.io/projected/32249127-220e-4205-869a-e7435218b685-kube-api-access-fthzc\") pod \"auto-csr-approver-29567048-qs9lf\" (UID: \"32249127-220e-4205-869a-e7435218b685\") " pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:00 crc kubenswrapper[4813]: I0320 16:08:00.480260 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:01 crc kubenswrapper[4813]: I0320 16:08:01.004149 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567048-qs9lf"] Mar 20 16:08:01 crc kubenswrapper[4813]: I0320 16:08:01.685416 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" event={"ID":"32249127-220e-4205-869a-e7435218b685","Type":"ContainerStarted","Data":"e1b5c1ec0c52bd2e6330c1677fc5145f84ded56f817d3209306548f114134f01"} Mar 20 16:08:02 crc kubenswrapper[4813]: I0320 16:08:02.697251 4813 generic.go:334] "Generic (PLEG): container finished" podID="32249127-220e-4205-869a-e7435218b685" containerID="2c04abcd2fa036ef776b351a6d03d03d33ed7ec1e57305d0a59c14eeb9fb899e" exitCode=0 Mar 20 16:08:02 crc kubenswrapper[4813]: I0320 16:08:02.697289 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" event={"ID":"32249127-220e-4205-869a-e7435218b685","Type":"ContainerDied","Data":"2c04abcd2fa036ef776b351a6d03d03d33ed7ec1e57305d0a59c14eeb9fb899e"} Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.048706 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.091821 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fthzc\" (UniqueName: \"kubernetes.io/projected/32249127-220e-4205-869a-e7435218b685-kube-api-access-fthzc\") pod \"32249127-220e-4205-869a-e7435218b685\" (UID: \"32249127-220e-4205-869a-e7435218b685\") " Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.098175 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32249127-220e-4205-869a-e7435218b685-kube-api-access-fthzc" (OuterVolumeSpecName: "kube-api-access-fthzc") pod "32249127-220e-4205-869a-e7435218b685" (UID: "32249127-220e-4205-869a-e7435218b685"). InnerVolumeSpecName "kube-api-access-fthzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.193192 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fthzc\" (UniqueName: \"kubernetes.io/projected/32249127-220e-4205-869a-e7435218b685-kube-api-access-fthzc\") on node \"crc\" DevicePath \"\"" Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.720606 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" event={"ID":"32249127-220e-4205-869a-e7435218b685","Type":"ContainerDied","Data":"e1b5c1ec0c52bd2e6330c1677fc5145f84ded56f817d3209306548f114134f01"} Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.720677 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1b5c1ec0c52bd2e6330c1677fc5145f84ded56f817d3209306548f114134f01" Mar 20 16:08:04 crc kubenswrapper[4813]: I0320 16:08:04.720686 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567048-qs9lf" Mar 20 16:08:05 crc kubenswrapper[4813]: I0320 16:08:05.127957 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567042-xqvjm"] Mar 20 16:08:05 crc kubenswrapper[4813]: I0320 16:08:05.134460 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567042-xqvjm"] Mar 20 16:08:05 crc kubenswrapper[4813]: I0320 16:08:05.275232 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df1feb5f-9978-4d2a-aa1d-5fb701529f21" path="/var/lib/kubelet/pods/df1feb5f-9978-4d2a-aa1d-5fb701529f21/volumes" Mar 20 16:08:13 crc kubenswrapper[4813]: I0320 16:08:13.266203 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:08:13 crc kubenswrapper[4813]: E0320 16:08:13.266801 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:08:16 crc kubenswrapper[4813]: I0320 16:08:16.627162 4813 scope.go:117] "RemoveContainer" containerID="9bb2db071164b6cefa4f445a6768f2913a8dfdeb3c7001d00f50b0e623519d87" Mar 20 16:08:25 crc kubenswrapper[4813]: I0320 16:08:25.266004 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:08:25 crc kubenswrapper[4813]: E0320 16:08:25.266699 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:08:37 crc kubenswrapper[4813]: I0320 16:08:37.271413 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:08:37 crc kubenswrapper[4813]: E0320 16:08:37.272220 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:08:52 crc kubenswrapper[4813]: I0320 16:08:52.266261 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:08:52 crc kubenswrapper[4813]: E0320 16:08:52.267062 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 
16:09:05 crc kubenswrapper[4813]: I0320 16:09:05.266230 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:09:05 crc kubenswrapper[4813]: E0320 16:09:05.268022 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:09:17 crc kubenswrapper[4813]: I0320 16:09:17.267263 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:09:17 crc kubenswrapper[4813]: E0320 16:09:17.267879 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:09:32 crc kubenswrapper[4813]: I0320 16:09:32.093615 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-db-create-gsq8f"] Mar 20 16:09:32 crc kubenswrapper[4813]: I0320 16:09:32.103176 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-db-create-gsq8f"] Mar 20 16:09:32 crc kubenswrapper[4813]: I0320 16:09:32.266334 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:09:32 crc kubenswrapper[4813]: E0320 16:09:32.266693 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.026032 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/root-account-create-update-ddxhk"] Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.033696 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-e994-account-create-update-45jsg"] Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.040088 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-e994-account-create-update-45jsg"] Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.047008 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/root-account-create-update-ddxhk"] Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.277747 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39739c2c-937f-4b31-b6f7-9f04c13411c8" path="/var/lib/kubelet/pods/39739c2c-937f-4b31-b6f7-9f04c13411c8/volumes" Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.278407 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93895b7a-2be8-44b3-92d0-ad24c832633b" 
path="/var/lib/kubelet/pods/93895b7a-2be8-44b3-92d0-ad24c832633b/volumes" Mar 20 16:09:33 crc kubenswrapper[4813]: I0320 16:09:33.279042 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec418c85-33d1-4b27-9b14-bfb444247571" path="/var/lib/kubelet/pods/ec418c85-33d1-4b27-9b14-bfb444247571/volumes" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.741360 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-n7gnc"] Mar 20 16:09:37 crc kubenswrapper[4813]: E0320 16:09:37.742228 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32249127-220e-4205-869a-e7435218b685" containerName="oc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.742247 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="32249127-220e-4205-869a-e7435218b685" containerName="oc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.742532 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="32249127-220e-4205-869a-e7435218b685" containerName="oc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.744343 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.755364 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n7gnc"] Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.859022 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-utilities\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.859100 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-catalog-content\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.859427 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8bs8\" (UniqueName: \"kubernetes.io/projected/74319e50-e2eb-4579-9e0d-ec183e54f673-kube-api-access-g8bs8\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.961248 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8bs8\" (UniqueName: \"kubernetes.io/projected/74319e50-e2eb-4579-9e0d-ec183e54f673-kube-api-access-g8bs8\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.961352 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-utilities\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.961403 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-catalog-content\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.961922 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-utilities\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.962032 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-catalog-content\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:37 crc kubenswrapper[4813]: I0320 16:09:37.984037 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8bs8\" (UniqueName: \"kubernetes.io/projected/74319e50-e2eb-4579-9e0d-ec183e54f673-kube-api-access-g8bs8\") pod \"certified-operators-n7gnc\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:38 crc kubenswrapper[4813]: I0320 16:09:38.078189 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:38 crc kubenswrapper[4813]: I0320 16:09:38.622928 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n7gnc"] Mar 20 16:09:39 crc kubenswrapper[4813]: I0320 16:09:39.549584 4813 generic.go:334] "Generic (PLEG): container finished" podID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerID="3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41" exitCode=0 Mar 20 16:09:39 crc kubenswrapper[4813]: I0320 16:09:39.549667 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n7gnc" event={"ID":"74319e50-e2eb-4579-9e0d-ec183e54f673","Type":"ContainerDied","Data":"3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41"} Mar 20 16:09:39 crc kubenswrapper[4813]: I0320 16:09:39.549885 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n7gnc" event={"ID":"74319e50-e2eb-4579-9e0d-ec183e54f673","Type":"ContainerStarted","Data":"648604266b2d094d4356774a23d740e9083eb6b4ccb04c75c492f6b8bf283332"} Mar 20 16:09:39 crc kubenswrapper[4813]: I0320 16:09:39.552986 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 16:09:41 crc kubenswrapper[4813]: I0320 16:09:41.567774 4813 generic.go:334] "Generic (PLEG): container finished" podID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerID="f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba" exitCode=0 Mar 20 16:09:41 crc kubenswrapper[4813]: I0320 16:09:41.567999 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n7gnc" event={"ID":"74319e50-e2eb-4579-9e0d-ec183e54f673","Type":"ContainerDied","Data":"f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba"} Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 
16:09:42.534229 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ksxgc"] Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.536211 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.566672 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ksxgc"] Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.602775 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n7gnc" event={"ID":"74319e50-e2eb-4579-9e0d-ec183e54f673","Type":"ContainerStarted","Data":"6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06"} Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.624334 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-n7gnc" podStartSLOduration=2.970431932 podStartE2EDuration="5.624314255s" podCreationTimestamp="2026-03-20 16:09:37 +0000 UTC" firstStartedPulling="2026-03-20 16:09:39.552701392 +0000 UTC m=+1908.975404243" lastFinishedPulling="2026-03-20 16:09:42.206583725 +0000 UTC m=+1911.629286566" observedRunningTime="2026-03-20 16:09:42.621593782 +0000 UTC m=+1912.044296623" watchObservedRunningTime="2026-03-20 16:09:42.624314255 +0000 UTC m=+1912.047017096" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.637248 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-utilities\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.637311 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-catalog-content\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.637392 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp46x\" (UniqueName: \"kubernetes.io/projected/fe5e1a2a-f893-485c-ba42-fac77c6353f9-kube-api-access-cp46x\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.738620 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp46x\" (UniqueName: \"kubernetes.io/projected/fe5e1a2a-f893-485c-ba42-fac77c6353f9-kube-api-access-cp46x\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.738764 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-utilities\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.738822 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-catalog-content\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.739431 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-utilities\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.739495 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-catalog-content\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.764538 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp46x\" (UniqueName: \"kubernetes.io/projected/fe5e1a2a-f893-485c-ba42-fac77c6353f9-kube-api-access-cp46x\") pod \"redhat-operators-ksxgc\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:42 crc kubenswrapper[4813]: I0320 16:09:42.892683 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:43 crc kubenswrapper[4813]: I0320 16:09:43.266821 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:09:43 crc kubenswrapper[4813]: E0320 16:09:43.267204 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:09:43 crc kubenswrapper[4813]: W0320 16:09:43.433170 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe5e1a2a_f893_485c_ba42_fac77c6353f9.slice/crio-5c40e3e7d85313531e2e8a96a01b531756bf0bdfaccc84d13084d3fac0630d2c WatchSource:0}: Error finding container 5c40e3e7d85313531e2e8a96a01b531756bf0bdfaccc84d13084d3fac0630d2c: Status 404 returned error can't find the container with id 5c40e3e7d85313531e2e8a96a01b531756bf0bdfaccc84d13084d3fac0630d2c Mar 20 16:09:43 crc kubenswrapper[4813]: I0320 16:09:43.436758 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ksxgc"] Mar 20 16:09:43 crc kubenswrapper[4813]: I0320 16:09:43.618015 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerStarted","Data":"5c40e3e7d85313531e2e8a96a01b531756bf0bdfaccc84d13084d3fac0630d2c"} Mar 20 16:09:44 crc kubenswrapper[4813]: I0320 16:09:44.628088 4813 generic.go:334] "Generic (PLEG): container finished" podID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" 
containerID="787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c" exitCode=0 Mar 20 16:09:44 crc kubenswrapper[4813]: I0320 16:09:44.628503 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerDied","Data":"787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c"} Mar 20 16:09:46 crc kubenswrapper[4813]: I0320 16:09:46.646893 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerStarted","Data":"c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114"} Mar 20 16:09:47 crc kubenswrapper[4813]: I0320 16:09:47.664327 4813 generic.go:334] "Generic (PLEG): container finished" podID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerID="c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114" exitCode=0 Mar 20 16:09:47 crc kubenswrapper[4813]: I0320 16:09:47.664442 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerDied","Data":"c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114"} Mar 20 16:09:48 crc kubenswrapper[4813]: I0320 16:09:48.079272 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:48 crc kubenswrapper[4813]: I0320 16:09:48.079387 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:48 crc kubenswrapper[4813]: I0320 16:09:48.121702 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:48 crc kubenswrapper[4813]: I0320 16:09:48.751592 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:50 crc kubenswrapper[4813]: I0320 16:09:50.692984 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerStarted","Data":"9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0"} Mar 20 16:09:50 crc kubenswrapper[4813]: I0320 16:09:50.718639 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ksxgc" podStartSLOduration=3.581519739 podStartE2EDuration="8.718621918s" podCreationTimestamp="2026-03-20 16:09:42 +0000 UTC" firstStartedPulling="2026-03-20 16:09:44.630685744 +0000 UTC m=+1914.053388585" lastFinishedPulling="2026-03-20 16:09:49.767787923 +0000 UTC m=+1919.190490764" observedRunningTime="2026-03-20 16:09:50.711340281 +0000 UTC m=+1920.134043132" watchObservedRunningTime="2026-03-20 16:09:50.718621918 +0000 UTC m=+1920.141324769" Mar 20 16:09:52 crc kubenswrapper[4813]: I0320 16:09:52.892838 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:52 crc kubenswrapper[4813]: I0320 16:09:52.894219 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:09:52 crc kubenswrapper[4813]: I0320 16:09:52.929410 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-n7gnc"] Mar 20 16:09:52 crc kubenswrapper[4813]: I0320 16:09:52.930006 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-n7gnc" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="registry-server" containerID="cri-o://6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06" gracePeriod=2 Mar 20 16:09:53 crc kubenswrapper[4813]: I0320 16:09:53.937512 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ksxgc" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="registry-server" probeResult="failure" output=< Mar 20 16:09:53 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Mar 20 16:09:53 crc kubenswrapper[4813]: > Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.265423 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:09:54 crc kubenswrapper[4813]: E0320 16:09:54.265670 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.701979 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.728579 4813 generic.go:334] "Generic (PLEG): container finished" podID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerID="6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06" exitCode=0 Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.730271 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-n7gnc" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.730882 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n7gnc" event={"ID":"74319e50-e2eb-4579-9e0d-ec183e54f673","Type":"ContainerDied","Data":"6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06"} Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.730910 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n7gnc" event={"ID":"74319e50-e2eb-4579-9e0d-ec183e54f673","Type":"ContainerDied","Data":"648604266b2d094d4356774a23d740e9083eb6b4ccb04c75c492f6b8bf283332"} Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.730927 4813 scope.go:117] "RemoveContainer" containerID="6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.749090 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-catalog-content\") pod \"74319e50-e2eb-4579-9e0d-ec183e54f673\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.749187 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8bs8\" (UniqueName: \"kubernetes.io/projected/74319e50-e2eb-4579-9e0d-ec183e54f673-kube-api-access-g8bs8\") pod \"74319e50-e2eb-4579-9e0d-ec183e54f673\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.749328 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-utilities\") pod \"74319e50-e2eb-4579-9e0d-ec183e54f673\" (UID: \"74319e50-e2eb-4579-9e0d-ec183e54f673\") " Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.750193 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-utilities" (OuterVolumeSpecName: "utilities") pod "74319e50-e2eb-4579-9e0d-ec183e54f673" (UID: "74319e50-e2eb-4579-9e0d-ec183e54f673"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.754758 4813 scope.go:117] "RemoveContainer" containerID="f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.757139 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74319e50-e2eb-4579-9e0d-ec183e54f673-kube-api-access-g8bs8" (OuterVolumeSpecName: "kube-api-access-g8bs8") pod "74319e50-e2eb-4579-9e0d-ec183e54f673" (UID: "74319e50-e2eb-4579-9e0d-ec183e54f673"). InnerVolumeSpecName "kube-api-access-g8bs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.798030 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74319e50-e2eb-4579-9e0d-ec183e54f673" (UID: "74319e50-e2eb-4579-9e0d-ec183e54f673"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.808181 4813 scope.go:117] "RemoveContainer" containerID="3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.828528 4813 scope.go:117] "RemoveContainer" containerID="6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06" Mar 20 16:09:54 crc kubenswrapper[4813]: E0320 16:09:54.829062 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06\": container with ID starting with 6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06 not found: ID does not exist" containerID="6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.829104 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06"} err="failed to get container status \"6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06\": rpc error: code = NotFound desc = could not find container \"6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06\": container with ID starting with 6ff2364759742c75ad1916fff3ee3eb08bc988355c5997d9fe629ff4ae5fbd06 not found: ID does not exist" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.829131 4813 scope.go:117] "RemoveContainer" containerID="f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba" Mar 20 16:09:54 crc kubenswrapper[4813]: E0320 16:09:54.829679 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba\": container with ID starting with f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba not found: ID does not exist" containerID="f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.829745 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba"} err="failed to get container status \"f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba\": rpc error: code = NotFound desc = could not find container \"f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba\": container with ID starting with f68938db394bb0e1d1f7bfa9e892aab354dc96dd2ccff302de1984e69b121fba not found: ID does not exist" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.829785 4813 scope.go:117] "RemoveContainer" containerID="3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41" Mar 20 16:09:54 crc kubenswrapper[4813]: E0320 16:09:54.830145 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41\": container with ID starting with 3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41 not found: ID does not exist" containerID="3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.830262 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41"} err="failed to get container status \"3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41\": rpc error: code = NotFound desc = could not find container \"3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41\": container with ID starting with 3ffe9dd86007d958f1f819ec023d34406a25d7444c62e540f0aca90704797a41 not found: ID does not exist" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.851004 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.851242 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74319e50-e2eb-4579-9e0d-ec183e54f673-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:09:54 crc kubenswrapper[4813]: I0320 16:09:54.851332 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8bs8\" (UniqueName: \"kubernetes.io/projected/74319e50-e2eb-4579-9e0d-ec183e54f673-kube-api-access-g8bs8\") on node \"crc\" DevicePath \"\"" Mar 20 16:09:55 crc kubenswrapper[4813]: I0320 16:09:55.069503 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n7gnc"] Mar 20 16:09:55 crc kubenswrapper[4813]: I0320 16:09:55.077744 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-n7gnc"] Mar 20 16:09:55 crc kubenswrapper[4813]: I0320 16:09:55.278361 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" path="/var/lib/kubelet/pods/74319e50-e2eb-4579-9e0d-ec183e54f673/volumes" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.147563 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567050-mb2hn"] Mar 20 16:10:00 crc kubenswrapper[4813]: E0320 16:10:00.148528 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="extract-utilities" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.148543 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="extract-utilities" Mar 20 16:10:00 crc kubenswrapper[4813]: E0320 16:10:00.148565 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="extract-content" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.148572 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="extract-content" Mar 20 16:10:00 crc kubenswrapper[4813]: E0320 16:10:00.148593 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="registry-server" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.148601 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="registry-server" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.148785 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="74319e50-e2eb-4579-9e0d-ec183e54f673" containerName="registry-server" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.149500 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.153471 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.154415 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.159802 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.179505 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567050-mb2hn"] Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.238992 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w88qt\" (UniqueName: \"kubernetes.io/projected/04dacd92-0c61-482c-8001-f2c738614408-kube-api-access-w88qt\") pod \"auto-csr-approver-29567050-mb2hn\" (UID: \"04dacd92-0c61-482c-8001-f2c738614408\") " pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.340636 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w88qt\" (UniqueName: \"kubernetes.io/projected/04dacd92-0c61-482c-8001-f2c738614408-kube-api-access-w88qt\") pod \"auto-csr-approver-29567050-mb2hn\" (UID: \"04dacd92-0c61-482c-8001-f2c738614408\") " pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.362232 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w88qt\" (UniqueName: \"kubernetes.io/projected/04dacd92-0c61-482c-8001-f2c738614408-kube-api-access-w88qt\") pod \"auto-csr-approver-29567050-mb2hn\" (UID: \"04dacd92-0c61-482c-8001-f2c738614408\") " pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.475818 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:00 crc kubenswrapper[4813]: I0320 16:10:00.932294 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567050-mb2hn"] Mar 20 16:10:01 crc kubenswrapper[4813]: I0320 16:10:01.807126 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" event={"ID":"04dacd92-0c61-482c-8001-f2c738614408","Type":"ContainerStarted","Data":"53bc38f19d40bc1e9dea329e5e31e9703bc0ff30b2bf0a6f893b743ccc66bfc0"} Mar 20 16:10:03 crc kubenswrapper[4813]: I0320 16:10:03.823828 4813 generic.go:334] "Generic (PLEG): container finished" podID="04dacd92-0c61-482c-8001-f2c738614408" containerID="0b69ba0e439619bc3415276a15f214671143d15759cf0b107b80fcb4942e9d00" exitCode=0 Mar 20 16:10:03 crc kubenswrapper[4813]: I0320 16:10:03.823891 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" event={"ID":"04dacd92-0c61-482c-8001-f2c738614408","Type":"ContainerDied","Data":"0b69ba0e439619bc3415276a15f214671143d15759cf0b107b80fcb4942e9d00"} Mar 20 16:10:03 crc kubenswrapper[4813]: I0320 16:10:03.951159 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ksxgc" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="registry-server" probeResult="failure" output=< Mar 20 16:10:03 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Mar 20 16:10:03 crc kubenswrapper[4813]: > Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.162327 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.213843 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w88qt\" (UniqueName: \"kubernetes.io/projected/04dacd92-0c61-482c-8001-f2c738614408-kube-api-access-w88qt\") pod \"04dacd92-0c61-482c-8001-f2c738614408\" (UID: \"04dacd92-0c61-482c-8001-f2c738614408\") " Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.218688 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04dacd92-0c61-482c-8001-f2c738614408-kube-api-access-w88qt" (OuterVolumeSpecName: "kube-api-access-w88qt") pod "04dacd92-0c61-482c-8001-f2c738614408" (UID: "04dacd92-0c61-482c-8001-f2c738614408"). InnerVolumeSpecName "kube-api-access-w88qt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.315437 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w88qt\" (UniqueName: \"kubernetes.io/projected/04dacd92-0c61-482c-8001-f2c738614408-kube-api-access-w88qt\") on node \"crc\" DevicePath \"\"" Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.841968 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" event={"ID":"04dacd92-0c61-482c-8001-f2c738614408","Type":"ContainerDied","Data":"53bc38f19d40bc1e9dea329e5e31e9703bc0ff30b2bf0a6f893b743ccc66bfc0"} Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.842003 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53bc38f19d40bc1e9dea329e5e31e9703bc0ff30b2bf0a6f893b743ccc66bfc0" Mar 20 16:10:05 crc kubenswrapper[4813]: I0320 16:10:05.842019 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567050-mb2hn" Mar 20 16:10:06 crc kubenswrapper[4813]: I0320 16:10:06.236589 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567044-x6drx"] Mar 20 16:10:06 crc kubenswrapper[4813]: I0320 16:10:06.244036 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567044-x6drx"] Mar 20 16:10:07 crc kubenswrapper[4813]: I0320 16:10:07.275946 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7679b19d-34cb-404b-9e11-39e1e6a6cf60" path="/var/lib/kubelet/pods/7679b19d-34cb-404b-9e11-39e1e6a6cf60/volumes" Mar 20 16:10:09 crc kubenswrapper[4813]: I0320 16:10:09.266281 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:10:09 crc kubenswrapper[4813]: E0320 16:10:09.267359 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:10:12 crc kubenswrapper[4813]: I0320 16:10:12.944964 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:10:13 crc kubenswrapper[4813]: I0320 16:10:13.009657 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.129519 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ksxgc"] Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.130102 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ksxgc" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="registry-server" containerID="cri-o://9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0" gracePeriod=2 Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.545552 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.612863 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-catalog-content\") pod \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.612996 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-utilities\") pod \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.613081 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cp46x\" (UniqueName: \"kubernetes.io/projected/fe5e1a2a-f893-485c-ba42-fac77c6353f9-kube-api-access-cp46x\") pod \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\" (UID: \"fe5e1a2a-f893-485c-ba42-fac77c6353f9\") " Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.613766 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-utilities" (OuterVolumeSpecName: "utilities") pod "fe5e1a2a-f893-485c-ba42-fac77c6353f9" (UID: "fe5e1a2a-f893-485c-ba42-fac77c6353f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.618272 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe5e1a2a-f893-485c-ba42-fac77c6353f9-kube-api-access-cp46x" (OuterVolumeSpecName: "kube-api-access-cp46x") pod "fe5e1a2a-f893-485c-ba42-fac77c6353f9" (UID: "fe5e1a2a-f893-485c-ba42-fac77c6353f9"). InnerVolumeSpecName "kube-api-access-cp46x". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.714422 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cp46x\" (UniqueName: \"kubernetes.io/projected/fe5e1a2a-f893-485c-ba42-fac77c6353f9-kube-api-access-cp46x\") on node \"crc\" DevicePath \"\"" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.714451 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.726509 4813 scope.go:117] "RemoveContainer" containerID="83bfe169cc29938503d083ac1edfd46931bff4e2b1ac10da0a676fc2b16fae82" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.752570 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe5e1a2a-f893-485c-ba42-fac77c6353f9" (UID: "fe5e1a2a-f893-485c-ba42-fac77c6353f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.766690 4813 scope.go:117] "RemoveContainer" containerID="4e975ada3dc45792f523b587ae1716d362d9b8fe6badd812ea32e039f85ffba8" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.793635 4813 scope.go:117] "RemoveContainer" containerID="fea26861c38e5fc5e5111dc59de109ea2221de9ed285ad803ca467d681350b17" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.811536 4813 scope.go:117] "RemoveContainer" containerID="ea3d2dcf6598957acf5286dab06492c127c280b038f630a6ca76deda59bc0c10" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.815642 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5e1a2a-f893-485c-ba42-fac77c6353f9-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.950652 4813 generic.go:334] "Generic (PLEG): container finished" podID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerID="9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0" exitCode=0 Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.950692 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerDied","Data":"9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0"} Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.950719 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ksxgc" event={"ID":"fe5e1a2a-f893-485c-ba42-fac77c6353f9","Type":"ContainerDied","Data":"5c40e3e7d85313531e2e8a96a01b531756bf0bdfaccc84d13084d3fac0630d2c"} Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.950737 4813 scope.go:117] "RemoveContainer" containerID="9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.950747 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ksxgc" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.981810 4813 scope.go:117] "RemoveContainer" containerID="c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114" Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.986704 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ksxgc"] Mar 20 16:10:16 crc kubenswrapper[4813]: I0320 16:10:16.997149 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ksxgc"] Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.007381 4813 scope.go:117] "RemoveContainer" containerID="787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.026468 4813 scope.go:117] "RemoveContainer" containerID="9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0" Mar 20 16:10:17 crc kubenswrapper[4813]: E0320 16:10:17.026937 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0\": container with ID starting with 9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0 not found: ID does not exist" containerID="9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.026993 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0"} err="failed to get container status \"9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0\": rpc error: code = NotFound desc = could not find container \"9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0\": container with ID starting with 9fe29a5ef72942d3166b6ff0890cba69c28370a4aadf5fd16b83a75443d503d0 not found: ID does not exist" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.027021 4813 scope.go:117] "RemoveContainer" containerID="c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114" Mar 20 16:10:17 crc kubenswrapper[4813]: E0320 16:10:17.027381 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114\": container with ID starting with c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114 not found: ID does not exist" containerID="c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.027429 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114"} err="failed to get container status \"c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114\": rpc error: code = NotFound desc = could not find container \"c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114\": container with ID starting with c2ad0d06142fd46e9b282dd3ff73136d36999a5171ae3fd790fa04b82f4a8114 not found: ID does not exist" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.027455 4813 scope.go:117] "RemoveContainer" containerID="787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c" Mar 20 16:10:17 crc kubenswrapper[4813]: E0320 16:10:17.027790 4813 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c\": container with ID starting with 787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c not found: ID does not exist" containerID="787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.027821 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c"} err="failed to get container status \"787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c\": rpc error: code = NotFound desc = could not find container \"787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c\": container with ID starting with 787c91bfe881221f3df77ee21e3e78c96ab2b9a4e64604f1f602fab17277604c not found: ID does not exist" Mar 20 16:10:17 crc kubenswrapper[4813]: I0320 16:10:17.280222 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" path="/var/lib/kubelet/pods/fe5e1a2a-f893-485c-ba42-fac77c6353f9/volumes" Mar 20 16:10:21 crc kubenswrapper[4813]: I0320 16:10:21.271031 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:10:21 crc kubenswrapper[4813]: E0320 16:10:21.271761 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:10:22 crc kubenswrapper[4813]: I0320 16:10:22.046092 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-m4lj9"] Mar 20 16:10:22 crc kubenswrapper[4813]: I0320 16:10:22.052453 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-m4lj9"] Mar 20 16:10:23 crc kubenswrapper[4813]: I0320 16:10:23.287090 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="538d338b-7770-4fd4-95b2-1fe7653e0159" path="/var/lib/kubelet/pods/538d338b-7770-4fd4-95b2-1fe7653e0159/volumes" Mar 20 16:10:34 crc kubenswrapper[4813]: I0320 16:10:34.266032 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:10:34 crc kubenswrapper[4813]: E0320 16:10:34.266872 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:10:37 crc kubenswrapper[4813]: I0320 16:10:37.036704 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-6l7q5"] Mar 20 16:10:37 crc kubenswrapper[4813]: I0320 16:10:37.045776 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-6l7q5"] Mar 20 16:10:37 crc kubenswrapper[4813]: I0320 16:10:37.275043 4813 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="cee890fd-7087-4331-958a-6098985056be" path="/var/lib/kubelet/pods/cee890fd-7087-4331-958a-6098985056be/volumes" Mar 20 16:10:49 crc kubenswrapper[4813]: I0320 16:10:49.266232 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:10:49 crc kubenswrapper[4813]: E0320 16:10:49.266960 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:11:01 crc kubenswrapper[4813]: I0320 16:11:01.269674 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:11:01 crc kubenswrapper[4813]: E0320 16:11:01.270313 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:11:14 crc kubenswrapper[4813]: I0320 16:11:14.265941 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:11:14 crc kubenswrapper[4813]: E0320 16:11:14.266905 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:11:16 crc kubenswrapper[4813]: I0320 16:11:16.896856 4813 scope.go:117] "RemoveContainer" containerID="da9612f0cf84d7e69de5fdca75b03e4d1d0366798953ba5e2be48a7317eb04f4" Mar 20 16:11:16 crc kubenswrapper[4813]: I0320 16:11:16.938548 4813 scope.go:117] "RemoveContainer" containerID="b06728261a4222fcffa73e2c79d74b7107e355a6c398f3cdf7e573a5436a63f2" Mar 20 16:11:29 crc kubenswrapper[4813]: I0320 16:11:29.265959 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:11:29 crc kubenswrapper[4813]: E0320 16:11:29.266726 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:11:35 crc kubenswrapper[4813]: I0320 16:11:35.047308 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-854b-account-create-update-hfhhk"] Mar 20 16:11:35 crc kubenswrapper[4813]: I0320 16:11:35.057575 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["watcher-kuttl-default/watcher-db-create-qqmk2"] Mar 20 16:11:35 crc kubenswrapper[4813]: I0320 16:11:35.066570 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qqmk2"] Mar 20 16:11:35 crc kubenswrapper[4813]: I0320 16:11:35.072497 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-854b-account-create-update-hfhhk"] Mar 20 16:11:35 crc kubenswrapper[4813]: I0320 16:11:35.278235 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60365901-d14b-436f-828e-070910ceccba" path="/var/lib/kubelet/pods/60365901-d14b-436f-828e-070910ceccba/volumes" Mar 20 16:11:35 crc kubenswrapper[4813]: I0320 16:11:35.279273 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deea0819-58b8-4780-8e3c-49dd4185a4e3" path="/var/lib/kubelet/pods/deea0819-58b8-4780-8e3c-49dd4185a4e3/volumes" Mar 20 16:11:42 crc kubenswrapper[4813]: I0320 16:11:42.266092 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:11:42 crc kubenswrapper[4813]: E0320 16:11:42.267269 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:11:54 crc kubenswrapper[4813]: I0320 16:11:54.265987 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:11:54 crc kubenswrapper[4813]: E0320 16:11:54.266713 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.141162 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567052-2pwgn"] Mar 20 16:12:00 crc kubenswrapper[4813]: E0320 16:12:00.142282 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="extract-content" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.142306 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="extract-content" Mar 20 16:12:00 crc kubenswrapper[4813]: E0320 16:12:00.142340 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04dacd92-0c61-482c-8001-f2c738614408" containerName="oc" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.142352 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="04dacd92-0c61-482c-8001-f2c738614408" containerName="oc" Mar 20 16:12:00 crc kubenswrapper[4813]: E0320 16:12:00.142375 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="registry-server" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.142387 4813 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="registry-server" Mar 20 16:12:00 crc kubenswrapper[4813]: E0320 16:12:00.142404 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="extract-utilities" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.142415 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="extract-utilities" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.142707 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="04dacd92-0c61-482c-8001-f2c738614408" containerName="oc" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.142763 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe5e1a2a-f893-485c-ba42-fac77c6353f9" containerName="registry-server" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.143672 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.146942 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.147010 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.147183 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.150075 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567052-2pwgn"] Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.174978 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5klb9\" (UniqueName: \"kubernetes.io/projected/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f-kube-api-access-5klb9\") pod \"auto-csr-approver-29567052-2pwgn\" (UID: \"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f\") " pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.276313 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5klb9\" (UniqueName: \"kubernetes.io/projected/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f-kube-api-access-5klb9\") pod \"auto-csr-approver-29567052-2pwgn\" (UID: \"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f\") " pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.297099 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5klb9\" (UniqueName: \"kubernetes.io/projected/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f-kube-api-access-5klb9\") pod \"auto-csr-approver-29567052-2pwgn\" (UID: \"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f\") " pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.472760 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:00 crc kubenswrapper[4813]: I0320 16:12:00.928662 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567052-2pwgn"] Mar 20 16:12:01 crc kubenswrapper[4813]: I0320 16:12:01.812425 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" event={"ID":"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f","Type":"ContainerStarted","Data":"c0ee83bc935cf49ff8d0f7b6c186b347c1f3a808f1368da9907e4d2314040f8e"} Mar 20 16:12:06 crc kubenswrapper[4813]: I0320 16:12:06.266248 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:12:06 crc kubenswrapper[4813]: E0320 16:12:06.267202 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:12:06 crc kubenswrapper[4813]: I0320 16:12:06.863427 4813 generic.go:334] "Generic (PLEG): container finished" podID="6c3ffb29-7274-4ce3-be5a-f5819f5ac12f" containerID="1dbac01ec63c94244bb30858d747d00b5759b1b627a744d588782563f4881b63" exitCode=0 Mar 20 16:12:06 crc kubenswrapper[4813]: I0320 16:12:06.863512 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" event={"ID":"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f","Type":"ContainerDied","Data":"1dbac01ec63c94244bb30858d747d00b5759b1b627a744d588782563f4881b63"} Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.162310 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.308806 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5klb9\" (UniqueName: \"kubernetes.io/projected/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f-kube-api-access-5klb9\") pod \"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f\" (UID: \"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f\") " Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.315865 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f-kube-api-access-5klb9" (OuterVolumeSpecName: "kube-api-access-5klb9") pod "6c3ffb29-7274-4ce3-be5a-f5819f5ac12f" (UID: "6c3ffb29-7274-4ce3-be5a-f5819f5ac12f"). InnerVolumeSpecName "kube-api-access-5klb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.411428 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5klb9\" (UniqueName: \"kubernetes.io/projected/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f-kube-api-access-5klb9\") on node \"crc\" DevicePath \"\"" Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.884048 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" event={"ID":"6c3ffb29-7274-4ce3-be5a-f5819f5ac12f","Type":"ContainerDied","Data":"c0ee83bc935cf49ff8d0f7b6c186b347c1f3a808f1368da9907e4d2314040f8e"} Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.884387 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0ee83bc935cf49ff8d0f7b6c186b347c1f3a808f1368da9907e4d2314040f8e" Mar 20 16:12:08 crc kubenswrapper[4813]: I0320 16:12:08.884120 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567052-2pwgn" Mar 20 16:12:09 crc kubenswrapper[4813]: I0320 16:12:09.238803 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567046-rcvzn"] Mar 20 16:12:09 crc kubenswrapper[4813]: I0320 16:12:09.245408 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567046-rcvzn"] Mar 20 16:12:09 crc kubenswrapper[4813]: I0320 16:12:09.276540 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c9c37d6-3b34-48c4-ab05-eea8a0981fc6" path="/var/lib/kubelet/pods/6c9c37d6-3b34-48c4-ab05-eea8a0981fc6/volumes" Mar 20 16:12:17 crc kubenswrapper[4813]: I0320 16:12:17.044699 4813 scope.go:117] "RemoveContainer" containerID="362cd5d2bf6cc0f57ea94b865eb5443198780f5d57408067f59dd32c06442d72" Mar 20 16:12:17 crc kubenswrapper[4813]: I0320 16:12:17.083949 4813 scope.go:117] "RemoveContainer" containerID="49ece6f557be574e976dea645212be60df338fe5cf2d47e9c88a02b3713b6a95" Mar 20 16:12:17 crc kubenswrapper[4813]: I0320 16:12:17.123316 4813 scope.go:117] "RemoveContainer" containerID="7cf14ce9f36d8a6c5b91a01ad085ff8ced781646d780c90da2e7dcda674db211" Mar 20 16:12:17 crc kubenswrapper[4813]: I0320 16:12:17.265555 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:12:17 crc kubenswrapper[4813]: E0320 16:12:17.265894 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:12:19 crc kubenswrapper[4813]: I0320 16:12:19.041583 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd"] Mar 20 16:12:19 crc kubenswrapper[4813]: I0320 16:12:19.052450 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-ghlxd"] Mar 20 16:12:19 crc kubenswrapper[4813]: I0320 16:12:19.275332 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b223213d-37ca-4b68-b871-e88a8c4811bf" path="/var/lib/kubelet/pods/b223213d-37ca-4b68-b871-e88a8c4811bf/volumes" Mar 20 16:12:29 crc kubenswrapper[4813]: I0320 16:12:29.266170 4813 
scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:12:29 crc kubenswrapper[4813]: E0320 16:12:29.266992 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:12:42 crc kubenswrapper[4813]: I0320 16:12:42.266585 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:12:43 crc kubenswrapper[4813]: I0320 16:12:43.168162 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"2c8b835c39ed1f8951bddf885feb47fab28112a96e06e4846419e61cc8cce03c"} Mar 20 16:13:17 crc kubenswrapper[4813]: I0320 16:13:17.200406 4813 scope.go:117] "RemoveContainer" containerID="36a14dcb7046c156b7b03ea6338b10c4287ac14dd18f2450ab26aea0258ded0d" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.588150 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p9pzz"] Mar 20 16:13:57 crc kubenswrapper[4813]: E0320 16:13:57.589137 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3ffb29-7274-4ce3-be5a-f5819f5ac12f" containerName="oc" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.589168 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3ffb29-7274-4ce3-be5a-f5819f5ac12f" containerName="oc" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.589367 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c3ffb29-7274-4ce3-be5a-f5819f5ac12f" containerName="oc" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.590843 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.599400 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p9pzz"] Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.664793 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-catalog-content\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.665142 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wpdg\" (UniqueName: \"kubernetes.io/projected/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-kube-api-access-6wpdg\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.665265 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-utilities\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.767254 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-utilities\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.767377 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-catalog-content\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.767500 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wpdg\" (UniqueName: \"kubernetes.io/projected/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-kube-api-access-6wpdg\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.767761 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-utilities\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.767946 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-catalog-content\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.789246 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6wpdg\" (UniqueName: \"kubernetes.io/projected/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-kube-api-access-6wpdg\") pod \"redhat-marketplace-p9pzz\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:57 crc kubenswrapper[4813]: I0320 16:13:57.917003 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:13:58 crc kubenswrapper[4813]: I0320 16:13:58.410417 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p9pzz"] Mar 20 16:13:58 crc kubenswrapper[4813]: I0320 16:13:58.782329 4813 generic.go:334] "Generic (PLEG): container finished" podID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerID="882a5bf9ec8a9a8a7e857b8e34a8d15f1f28246c3acd8aec0be1302f61011777" exitCode=0 Mar 20 16:13:58 crc kubenswrapper[4813]: I0320 16:13:58.782383 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerDied","Data":"882a5bf9ec8a9a8a7e857b8e34a8d15f1f28246c3acd8aec0be1302f61011777"} Mar 20 16:13:58 crc kubenswrapper[4813]: I0320 16:13:58.782619 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerStarted","Data":"587e5f3d9690be101599f7253bb26dfe1a0aa0560be842a4cb502a78959e38a4"} Mar 20 16:13:59 crc kubenswrapper[4813]: I0320 16:13:59.794315 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerStarted","Data":"bbe2683b6e089f99ad7404347e39445dfb50f87563dc0da63f89078ccfa10ae9"} Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.148062 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567054-fglrr"] Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.149418 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.153033 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.153222 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.160925 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567054-fglrr"] Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.163696 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.208582 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trvbh\" (UniqueName: \"kubernetes.io/projected/cbff87fc-a059-46bb-a25d-21485ab668c8-kube-api-access-trvbh\") pod \"auto-csr-approver-29567054-fglrr\" (UID: \"cbff87fc-a059-46bb-a25d-21485ab668c8\") " pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.310304 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trvbh\" (UniqueName: \"kubernetes.io/projected/cbff87fc-a059-46bb-a25d-21485ab668c8-kube-api-access-trvbh\") pod \"auto-csr-approver-29567054-fglrr\" (UID: \"cbff87fc-a059-46bb-a25d-21485ab668c8\") " pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.333965 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trvbh\" (UniqueName: \"kubernetes.io/projected/cbff87fc-a059-46bb-a25d-21485ab668c8-kube-api-access-trvbh\") pod \"auto-csr-approver-29567054-fglrr\" (UID: \"cbff87fc-a059-46bb-a25d-21485ab668c8\") " pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.468217 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.783710 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567054-fglrr"] Mar 20 16:14:00 crc kubenswrapper[4813]: W0320 16:14:00.793603 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbff87fc_a059_46bb_a25d_21485ab668c8.slice/crio-60852f43499a7f4a1424f49bfd3263e04381cb9dfd2640abfacd97de2b57713b WatchSource:0}: Error finding container 60852f43499a7f4a1424f49bfd3263e04381cb9dfd2640abfacd97de2b57713b: Status 404 returned error can't find the container with id 60852f43499a7f4a1424f49bfd3263e04381cb9dfd2640abfacd97de2b57713b Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.812402 4813 generic.go:334] "Generic (PLEG): container finished" podID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerID="bbe2683b6e089f99ad7404347e39445dfb50f87563dc0da63f89078ccfa10ae9" exitCode=0 Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.812582 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerDied","Data":"bbe2683b6e089f99ad7404347e39445dfb50f87563dc0da63f89078ccfa10ae9"} Mar 20 16:14:00 crc kubenswrapper[4813]: I0320 16:14:00.816594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567054-fglrr" event={"ID":"cbff87fc-a059-46bb-a25d-21485ab668c8","Type":"ContainerStarted","Data":"60852f43499a7f4a1424f49bfd3263e04381cb9dfd2640abfacd97de2b57713b"} Mar 20 16:14:01 crc kubenswrapper[4813]: I0320 16:14:01.826641 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerStarted","Data":"58722e504211de22a3e2374286a39f2814ef309a9bf512256547f77a5da4cf17"} Mar 20 16:14:01 crc kubenswrapper[4813]: I0320 16:14:01.853332 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p9pzz" podStartSLOduration=2.373384309 podStartE2EDuration="4.853313972s" podCreationTimestamp="2026-03-20 16:13:57 +0000 UTC" firstStartedPulling="2026-03-20 16:13:58.783795363 +0000 UTC m=+2168.206498204" lastFinishedPulling="2026-03-20 16:14:01.263725026 +0000 UTC m=+2170.686427867" observedRunningTime="2026-03-20 16:14:01.845728247 +0000 UTC m=+2171.268431088" watchObservedRunningTime="2026-03-20 16:14:01.853313972 +0000 UTC m=+2171.276016813" Mar 20 16:14:02 crc kubenswrapper[4813]: I0320 16:14:02.840292 4813 generic.go:334] "Generic (PLEG): container finished" podID="cbff87fc-a059-46bb-a25d-21485ab668c8" containerID="6f742d180ea65b4d95c4ca8d4becad4e1c9241734664bdcdc3554c8eebb6dd35" exitCode=0 Mar 20 16:14:02 crc kubenswrapper[4813]: I0320 16:14:02.840395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567054-fglrr" event={"ID":"cbff87fc-a059-46bb-a25d-21485ab668c8","Type":"ContainerDied","Data":"6f742d180ea65b4d95c4ca8d4becad4e1c9241734664bdcdc3554c8eebb6dd35"} Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.222370 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.302257 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trvbh\" (UniqueName: \"kubernetes.io/projected/cbff87fc-a059-46bb-a25d-21485ab668c8-kube-api-access-trvbh\") pod \"cbff87fc-a059-46bb-a25d-21485ab668c8\" (UID: \"cbff87fc-a059-46bb-a25d-21485ab668c8\") " Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.311066 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbff87fc-a059-46bb-a25d-21485ab668c8-kube-api-access-trvbh" (OuterVolumeSpecName: "kube-api-access-trvbh") pod "cbff87fc-a059-46bb-a25d-21485ab668c8" (UID: "cbff87fc-a059-46bb-a25d-21485ab668c8"). InnerVolumeSpecName "kube-api-access-trvbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.405098 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trvbh\" (UniqueName: \"kubernetes.io/projected/cbff87fc-a059-46bb-a25d-21485ab668c8-kube-api-access-trvbh\") on node \"crc\" DevicePath \"\"" Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.857233 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567054-fglrr" event={"ID":"cbff87fc-a059-46bb-a25d-21485ab668c8","Type":"ContainerDied","Data":"60852f43499a7f4a1424f49bfd3263e04381cb9dfd2640abfacd97de2b57713b"} Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.857281 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60852f43499a7f4a1424f49bfd3263e04381cb9dfd2640abfacd97de2b57713b" Mar 20 16:14:04 crc kubenswrapper[4813]: I0320 16:14:04.857304 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567054-fglrr" Mar 20 16:14:05 crc kubenswrapper[4813]: I0320 16:14:05.308644 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567048-qs9lf"] Mar 20 16:14:05 crc kubenswrapper[4813]: I0320 16:14:05.316301 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567048-qs9lf"] Mar 20 16:14:07 crc kubenswrapper[4813]: I0320 16:14:07.282433 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32249127-220e-4205-869a-e7435218b685" path="/var/lib/kubelet/pods/32249127-220e-4205-869a-e7435218b685/volumes" Mar 20 16:14:07 crc kubenswrapper[4813]: I0320 16:14:07.922015 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:14:07 crc kubenswrapper[4813]: I0320 16:14:07.922082 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:14:07 crc kubenswrapper[4813]: I0320 16:14:07.971602 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:14:08 crc kubenswrapper[4813]: I0320 16:14:08.923787 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:14:11 crc kubenswrapper[4813]: I0320 16:14:11.569960 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p9pzz"] Mar 20 16:14:11 crc kubenswrapper[4813]: I0320 16:14:11.570379 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p9pzz" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="registry-server" containerID="cri-o://58722e504211de22a3e2374286a39f2814ef309a9bf512256547f77a5da4cf17" gracePeriod=2 Mar 20 16:14:11 crc kubenswrapper[4813]: I0320 16:14:11.912297 4813 generic.go:334] "Generic (PLEG): container finished" podID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerID="58722e504211de22a3e2374286a39f2814ef309a9bf512256547f77a5da4cf17" exitCode=0 Mar 20 16:14:11 crc kubenswrapper[4813]: I0320 16:14:11.912345 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerDied","Data":"58722e504211de22a3e2374286a39f2814ef309a9bf512256547f77a5da4cf17"} Mar 20 16:14:11 crc kubenswrapper[4813]: I0320 16:14:11.991219 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.143161 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-utilities\") pod \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.143254 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wpdg\" (UniqueName: \"kubernetes.io/projected/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-kube-api-access-6wpdg\") pod \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.143294 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-catalog-content\") pod \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\" (UID: \"fb72de5b-bb31-4423-b7f5-d526cf39b4f8\") " Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.144382 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-utilities" (OuterVolumeSpecName: "utilities") pod "fb72de5b-bb31-4423-b7f5-d526cf39b4f8" (UID: "fb72de5b-bb31-4423-b7f5-d526cf39b4f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.148719 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-kube-api-access-6wpdg" (OuterVolumeSpecName: "kube-api-access-6wpdg") pod "fb72de5b-bb31-4423-b7f5-d526cf39b4f8" (UID: "fb72de5b-bb31-4423-b7f5-d526cf39b4f8"). InnerVolumeSpecName "kube-api-access-6wpdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.170806 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb72de5b-bb31-4423-b7f5-d526cf39b4f8" (UID: "fb72de5b-bb31-4423-b7f5-d526cf39b4f8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.244653 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.244688 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wpdg\" (UniqueName: \"kubernetes.io/projected/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-kube-api-access-6wpdg\") on node \"crc\" DevicePath \"\"" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.244698 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb72de5b-bb31-4423-b7f5-d526cf39b4f8-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.925963 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p9pzz" event={"ID":"fb72de5b-bb31-4423-b7f5-d526cf39b4f8","Type":"ContainerDied","Data":"587e5f3d9690be101599f7253bb26dfe1a0aa0560be842a4cb502a78959e38a4"} Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.926018 4813 scope.go:117] "RemoveContainer" containerID="58722e504211de22a3e2374286a39f2814ef309a9bf512256547f77a5da4cf17" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.926075 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p9pzz" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.969003 4813 scope.go:117] "RemoveContainer" containerID="bbe2683b6e089f99ad7404347e39445dfb50f87563dc0da63f89078ccfa10ae9" Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.973055 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p9pzz"] Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.979314 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p9pzz"] Mar 20 16:14:12 crc kubenswrapper[4813]: I0320 16:14:12.999296 4813 scope.go:117] "RemoveContainer" containerID="882a5bf9ec8a9a8a7e857b8e34a8d15f1f28246c3acd8aec0be1302f61011777" Mar 20 16:14:13 crc kubenswrapper[4813]: I0320 16:14:13.284773 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" path="/var/lib/kubelet/pods/fb72de5b-bb31-4423-b7f5-d526cf39b4f8/volumes" Mar 20 16:14:17 crc kubenswrapper[4813]: I0320 16:14:17.258635 4813 scope.go:117] "RemoveContainer" containerID="2c04abcd2fa036ef776b351a6d03d03d33ed7ec1e57305d0a59c14eeb9fb899e" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.139964 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6"] Mar 20 16:15:00 crc kubenswrapper[4813]: E0320 16:15:00.140995 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="registry-server" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.141012 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="registry-server" Mar 20 16:15:00 crc kubenswrapper[4813]: E0320 16:15:00.141028 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="extract-utilities" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.141036 
4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="extract-utilities" Mar 20 16:15:00 crc kubenswrapper[4813]: E0320 16:15:00.141058 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="extract-content" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.141067 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="extract-content" Mar 20 16:15:00 crc kubenswrapper[4813]: E0320 16:15:00.141084 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbff87fc-a059-46bb-a25d-21485ab668c8" containerName="oc" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.141091 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbff87fc-a059-46bb-a25d-21485ab668c8" containerName="oc" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.141292 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbff87fc-a059-46bb-a25d-21485ab668c8" containerName="oc" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.141319 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb72de5b-bb31-4423-b7f5-d526cf39b4f8" containerName="registry-server" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.142055 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.144087 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.144175 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.147769 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6"] Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.218979 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dea0d206-5d4e-48d7-aa3e-491be6ecf465-config-volume\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.219039 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dea0d206-5d4e-48d7-aa3e-491be6ecf465-secret-volume\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.219223 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9w9d\" (UniqueName: \"kubernetes.io/projected/dea0d206-5d4e-48d7-aa3e-491be6ecf465-kube-api-access-m9w9d\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.320973 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dea0d206-5d4e-48d7-aa3e-491be6ecf465-config-volume\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.321035 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dea0d206-5d4e-48d7-aa3e-491be6ecf465-secret-volume\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.322006 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dea0d206-5d4e-48d7-aa3e-491be6ecf465-config-volume\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.322041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9w9d\" (UniqueName: \"kubernetes.io/projected/dea0d206-5d4e-48d7-aa3e-491be6ecf465-kube-api-access-m9w9d\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.327000 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dea0d206-5d4e-48d7-aa3e-491be6ecf465-secret-volume\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.339681 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9w9d\" (UniqueName: \"kubernetes.io/projected/dea0d206-5d4e-48d7-aa3e-491be6ecf465-kube-api-access-m9w9d\") pod \"collect-profiles-29567055-z6bf6\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.467290 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:00 crc kubenswrapper[4813]: I0320 16:15:00.942593 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6"] Mar 20 16:15:01 crc kubenswrapper[4813]: I0320 16:15:01.318434 4813 generic.go:334] "Generic (PLEG): container finished" podID="dea0d206-5d4e-48d7-aa3e-491be6ecf465" containerID="69529ec9d9308bd5010dfc6c722eeb6669ae742de3701c95db4eed2da8164be4" exitCode=0 Mar 20 16:15:01 crc kubenswrapper[4813]: I0320 16:15:01.318475 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" event={"ID":"dea0d206-5d4e-48d7-aa3e-491be6ecf465","Type":"ContainerDied","Data":"69529ec9d9308bd5010dfc6c722eeb6669ae742de3701c95db4eed2da8164be4"} Mar 20 16:15:01 crc kubenswrapper[4813]: I0320 16:15:01.318521 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" event={"ID":"dea0d206-5d4e-48d7-aa3e-491be6ecf465","Type":"ContainerStarted","Data":"be4b86db960fa2cffc526970b514d4ad812a7fdb915d09eedad5c4d84fb580b2"} Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.627238 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.758697 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dea0d206-5d4e-48d7-aa3e-491be6ecf465-secret-volume\") pod \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.758876 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dea0d206-5d4e-48d7-aa3e-491be6ecf465-config-volume\") pod \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.758974 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9w9d\" (UniqueName: \"kubernetes.io/projected/dea0d206-5d4e-48d7-aa3e-491be6ecf465-kube-api-access-m9w9d\") pod \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\" (UID: \"dea0d206-5d4e-48d7-aa3e-491be6ecf465\") " Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.760244 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dea0d206-5d4e-48d7-aa3e-491be6ecf465-config-volume" (OuterVolumeSpecName: "config-volume") pod "dea0d206-5d4e-48d7-aa3e-491be6ecf465" (UID: "dea0d206-5d4e-48d7-aa3e-491be6ecf465"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.764162 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dea0d206-5d4e-48d7-aa3e-491be6ecf465-kube-api-access-m9w9d" (OuterVolumeSpecName: "kube-api-access-m9w9d") pod "dea0d206-5d4e-48d7-aa3e-491be6ecf465" (UID: "dea0d206-5d4e-48d7-aa3e-491be6ecf465"). InnerVolumeSpecName "kube-api-access-m9w9d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.764595 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dea0d206-5d4e-48d7-aa3e-491be6ecf465-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dea0d206-5d4e-48d7-aa3e-491be6ecf465" (UID: "dea0d206-5d4e-48d7-aa3e-491be6ecf465"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.860394 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dea0d206-5d4e-48d7-aa3e-491be6ecf465-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.860428 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dea0d206-5d4e-48d7-aa3e-491be6ecf465-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:15:02 crc kubenswrapper[4813]: I0320 16:15:02.860440 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9w9d\" (UniqueName: \"kubernetes.io/projected/dea0d206-5d4e-48d7-aa3e-491be6ecf465-kube-api-access-m9w9d\") on node \"crc\" DevicePath \"\"" Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.351245 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" event={"ID":"dea0d206-5d4e-48d7-aa3e-491be6ecf465","Type":"ContainerDied","Data":"be4b86db960fa2cffc526970b514d4ad812a7fdb915d09eedad5c4d84fb580b2"} Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.351290 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be4b86db960fa2cffc526970b514d4ad812a7fdb915d09eedad5c4d84fb580b2" Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.351352 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567055-z6bf6" Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.704777 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5"] Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.710848 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567010-62qb5"] Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.842856 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:15:03 crc kubenswrapper[4813]: I0320 16:15:03.843154 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:15:05 crc kubenswrapper[4813]: I0320 16:15:05.284416 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8878ff8-6310-45c3-adcb-116e36a2dbde" path="/var/lib/kubelet/pods/c8878ff8-6310-45c3-adcb-116e36a2dbde/volumes" Mar 20 16:15:17 crc kubenswrapper[4813]: I0320 16:15:17.340401 4813 scope.go:117] "RemoveContainer" containerID="d1b0e96f44f5cc0fa2eacab059b56866043aadc93aea98dc70b7a30ef5639e85" Mar 20 16:15:33 crc kubenswrapper[4813]: I0320 16:15:33.842763 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:15:33 crc kubenswrapper[4813]: I0320 16:15:33.843802 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.161284 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567056-mzh9b"] Mar 20 16:16:00 crc kubenswrapper[4813]: E0320 16:16:00.162199 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dea0d206-5d4e-48d7-aa3e-491be6ecf465" containerName="collect-profiles" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.162213 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dea0d206-5d4e-48d7-aa3e-491be6ecf465" containerName="collect-profiles" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.162416 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dea0d206-5d4e-48d7-aa3e-491be6ecf465" containerName="collect-profiles" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.163164 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.165587 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.165819 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.165930 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.178684 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567056-mzh9b"] Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.348642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4p8z\" (UniqueName: \"kubernetes.io/projected/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0-kube-api-access-w4p8z\") pod \"auto-csr-approver-29567056-mzh9b\" (UID: \"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0\") " pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.450917 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4p8z\" (UniqueName: \"kubernetes.io/projected/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0-kube-api-access-w4p8z\") pod \"auto-csr-approver-29567056-mzh9b\" (UID: \"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0\") " pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.477555 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4p8z\" (UniqueName: \"kubernetes.io/projected/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0-kube-api-access-w4p8z\") pod \"auto-csr-approver-29567056-mzh9b\" (UID: \"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0\") " pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.485532 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.971602 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567056-mzh9b"] Mar 20 16:16:00 crc kubenswrapper[4813]: I0320 16:16:00.979849 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 16:16:01 crc kubenswrapper[4813]: I0320 16:16:01.852650 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" event={"ID":"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0","Type":"ContainerStarted","Data":"ba7b6242bbf0039ba88e265a49e23989f4d6023b8ffa031395d0f4d6d32aa361"} Mar 20 16:16:02 crc kubenswrapper[4813]: I0320 16:16:02.982152 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tc8x2"] Mar 20 16:16:02 crc kubenswrapper[4813]: I0320 16:16:02.986439 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.022425 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tc8x2"] Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.104520 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-utilities\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.104823 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcfp5\" (UniqueName: \"kubernetes.io/projected/477634d9-2be3-4e60-8dfd-88f94651cf59-kube-api-access-vcfp5\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.104848 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-catalog-content\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.206223 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcfp5\" (UniqueName: \"kubernetes.io/projected/477634d9-2be3-4e60-8dfd-88f94651cf59-kube-api-access-vcfp5\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.206271 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-catalog-content\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.206339 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-utilities\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.207006 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-utilities\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.207090 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-catalog-content\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.228654 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vcfp5\" (UniqueName: \"kubernetes.io/projected/477634d9-2be3-4e60-8dfd-88f94651cf59-kube-api-access-vcfp5\") pod \"community-operators-tc8x2\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.341303 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.842290 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.842647 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.842692 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.843306 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2c8b835c39ed1f8951bddf885feb47fab28112a96e06e4846419e61cc8cce03c"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.843365 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://2c8b835c39ed1f8951bddf885feb47fab28112a96e06e4846419e61cc8cce03c" gracePeriod=600 Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.875828 4813 generic.go:334] "Generic (PLEG): container finished" podID="fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0" containerID="900501ec59493b38a0a70e8c9ca1eeb32d7a6965acd8a5e5160442dd1f325f15" exitCode=0 Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.875889 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" event={"ID":"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0","Type":"ContainerDied","Data":"900501ec59493b38a0a70e8c9ca1eeb32d7a6965acd8a5e5160442dd1f325f15"} Mar 20 16:16:03 crc kubenswrapper[4813]: I0320 16:16:03.878747 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tc8x2"] Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.884798 4813 generic.go:334] "Generic (PLEG): container finished" podID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerID="77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb" exitCode=0 Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.884847 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc8x2" 
event={"ID":"477634d9-2be3-4e60-8dfd-88f94651cf59","Type":"ContainerDied","Data":"77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb"} Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.886201 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc8x2" event={"ID":"477634d9-2be3-4e60-8dfd-88f94651cf59","Type":"ContainerStarted","Data":"4611462032d41510e9509f935177274f380ca79cb6ee12210c8b28786602e682"} Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.889406 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="2c8b835c39ed1f8951bddf885feb47fab28112a96e06e4846419e61cc8cce03c" exitCode=0 Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.889607 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"2c8b835c39ed1f8951bddf885feb47fab28112a96e06e4846419e61cc8cce03c"} Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.889632 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2"} Mar 20 16:16:04 crc kubenswrapper[4813]: I0320 16:16:04.889669 4813 scope.go:117] "RemoveContainer" containerID="0c9471c7984d244907e553b527deb02f5e134c486d58e2b673a5f4701f10ddb4" Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.203618 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.378804 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4p8z\" (UniqueName: \"kubernetes.io/projected/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0-kube-api-access-w4p8z\") pod \"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0\" (UID: \"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0\") " Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.385029 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0-kube-api-access-w4p8z" (OuterVolumeSpecName: "kube-api-access-w4p8z") pod "fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0" (UID: "fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0"). InnerVolumeSpecName "kube-api-access-w4p8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.481548 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4p8z\" (UniqueName: \"kubernetes.io/projected/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0-kube-api-access-w4p8z\") on node \"crc\" DevicePath \"\"" Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.902694 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" event={"ID":"fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0","Type":"ContainerDied","Data":"ba7b6242bbf0039ba88e265a49e23989f4d6023b8ffa031395d0f4d6d32aa361"} Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.902994 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba7b6242bbf0039ba88e265a49e23989f4d6023b8ffa031395d0f4d6d32aa361" Mar 20 16:16:05 crc kubenswrapper[4813]: I0320 16:16:05.902729 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567056-mzh9b" Mar 20 16:16:06 crc kubenswrapper[4813]: I0320 16:16:06.277635 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567050-mb2hn"] Mar 20 16:16:06 crc kubenswrapper[4813]: I0320 16:16:06.286686 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567050-mb2hn"] Mar 20 16:16:06 crc kubenswrapper[4813]: I0320 16:16:06.915339 4813 generic.go:334] "Generic (PLEG): container finished" podID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerID="c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690" exitCode=0 Mar 20 16:16:06 crc kubenswrapper[4813]: I0320 16:16:06.915666 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc8x2" event={"ID":"477634d9-2be3-4e60-8dfd-88f94651cf59","Type":"ContainerDied","Data":"c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690"} Mar 20 16:16:07 crc kubenswrapper[4813]: I0320 16:16:07.282786 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04dacd92-0c61-482c-8001-f2c738614408" path="/var/lib/kubelet/pods/04dacd92-0c61-482c-8001-f2c738614408/volumes" Mar 20 16:16:07 crc kubenswrapper[4813]: I0320 16:16:07.926577 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc8x2" event={"ID":"477634d9-2be3-4e60-8dfd-88f94651cf59","Type":"ContainerStarted","Data":"963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da"} Mar 20 16:16:07 crc kubenswrapper[4813]: I0320 16:16:07.949068 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tc8x2" podStartSLOduration=3.5508936650000003 podStartE2EDuration="5.949047472s" podCreationTimestamp="2026-03-20 16:16:02 +0000 UTC" firstStartedPulling="2026-03-20 16:16:04.887401395 +0000 UTC m=+2294.310104236" lastFinishedPulling="2026-03-20 16:16:07.285555202 +0000 UTC m=+2296.708258043" observedRunningTime="2026-03-20 16:16:07.944402146 +0000 UTC m=+2297.367104987" watchObservedRunningTime="2026-03-20 16:16:07.949047472 +0000 UTC m=+2297.371750323" Mar 20 16:16:13 crc kubenswrapper[4813]: I0320 16:16:13.342150 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:13 crc kubenswrapper[4813]: I0320 16:16:13.343803 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:13 crc kubenswrapper[4813]: I0320 16:16:13.399043 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:14 crc kubenswrapper[4813]: I0320 16:16:14.019536 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:16 crc kubenswrapper[4813]: I0320 16:16:16.974307 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tc8x2"] Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.018286 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tc8x2" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="registry-server" containerID="cri-o://963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da" gracePeriod=2 Mar 20 16:16:17 crc 
kubenswrapper[4813]: I0320 16:16:17.401484 4813 scope.go:117] "RemoveContainer" containerID="0b69ba0e439619bc3415276a15f214671143d15759cf0b107b80fcb4942e9d00" Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.458956 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.580387 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-utilities\") pod \"477634d9-2be3-4e60-8dfd-88f94651cf59\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.580644 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-catalog-content\") pod \"477634d9-2be3-4e60-8dfd-88f94651cf59\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.580701 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcfp5\" (UniqueName: \"kubernetes.io/projected/477634d9-2be3-4e60-8dfd-88f94651cf59-kube-api-access-vcfp5\") pod \"477634d9-2be3-4e60-8dfd-88f94651cf59\" (UID: \"477634d9-2be3-4e60-8dfd-88f94651cf59\") " Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.581553 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-utilities" (OuterVolumeSpecName: "utilities") pod "477634d9-2be3-4e60-8dfd-88f94651cf59" (UID: "477634d9-2be3-4e60-8dfd-88f94651cf59"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.588744 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477634d9-2be3-4e60-8dfd-88f94651cf59-kube-api-access-vcfp5" (OuterVolumeSpecName: "kube-api-access-vcfp5") pod "477634d9-2be3-4e60-8dfd-88f94651cf59" (UID: "477634d9-2be3-4e60-8dfd-88f94651cf59"). InnerVolumeSpecName "kube-api-access-vcfp5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.683110 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:16:17 crc kubenswrapper[4813]: I0320 16:16:17.683140 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcfp5\" (UniqueName: \"kubernetes.io/projected/477634d9-2be3-4e60-8dfd-88f94651cf59-kube-api-access-vcfp5\") on node \"crc\" DevicePath \"\"" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.036113 4813 generic.go:334] "Generic (PLEG): container finished" podID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerID="963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da" exitCode=0 Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.036164 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc8x2" event={"ID":"477634d9-2be3-4e60-8dfd-88f94651cf59","Type":"ContainerDied","Data":"963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da"} Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.036193 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tc8x2" event={"ID":"477634d9-2be3-4e60-8dfd-88f94651cf59","Type":"ContainerDied","Data":"4611462032d41510e9509f935177274f380ca79cb6ee12210c8b28786602e682"} Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.036215 4813 scope.go:117] "RemoveContainer" containerID="963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.036362 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tc8x2" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.065211 4813 scope.go:117] "RemoveContainer" containerID="c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.086740 4813 scope.go:117] "RemoveContainer" containerID="77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.106214 4813 scope.go:117] "RemoveContainer" containerID="963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da" Mar 20 16:16:18 crc kubenswrapper[4813]: E0320 16:16:18.106852 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da\": container with ID starting with 963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da not found: ID does not exist" containerID="963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.106891 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da"} err="failed to get container status \"963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da\": rpc error: code = NotFound desc = could not find container \"963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da\": container with ID starting with 963d4939ca77af6d9b2dac435f3c92dfdbea706d4814e81c157e1f8b324659da not found: ID does not exist" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.106916 4813 scope.go:117] "RemoveContainer" containerID="c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690" Mar 20 16:16:18 crc kubenswrapper[4813]: E0320 16:16:18.107340 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690\": container with ID starting with c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690 not found: ID does not exist" containerID="c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.107365 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690"} err="failed to get container status \"c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690\": rpc error: code = NotFound desc = could not find container \"c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690\": container with ID starting with c808fa7e474ee284e0f1dd21d83b613cc28f2ffcba8db86c2416c7561b280690 not found: ID does not exist" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.107382 4813 scope.go:117] "RemoveContainer" containerID="77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb" Mar 20 16:16:18 crc kubenswrapper[4813]: E0320 16:16:18.107750 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb\": container with ID starting with 77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb not found: ID does not exist" containerID="77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb" 
Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.107783 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb"} err="failed to get container status \"77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb\": rpc error: code = NotFound desc = could not find container \"77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb\": container with ID starting with 77de6a9af2c9fbedd629dc84070f630502e9c75f195f3551cb501808f6df85eb not found: ID does not exist" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.611593 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "477634d9-2be3-4e60-8dfd-88f94651cf59" (UID: "477634d9-2be3-4e60-8dfd-88f94651cf59"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.665313 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tc8x2"] Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.670911 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tc8x2"] Mar 20 16:16:18 crc kubenswrapper[4813]: I0320 16:16:18.699458 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477634d9-2be3-4e60-8dfd-88f94651cf59-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:16:19 crc kubenswrapper[4813]: I0320 16:16:19.275349 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" path="/var/lib/kubelet/pods/477634d9-2be3-4e60-8dfd-88f94651cf59/volumes" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.835969 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher854b-account-delete-r6twr"] Mar 20 16:17:33 crc kubenswrapper[4813]: E0320 16:17:33.836937 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="extract-content" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.836955 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="extract-content" Mar 20 16:17:33 crc kubenswrapper[4813]: E0320 16:17:33.836981 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0" containerName="oc" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.836990 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0" containerName="oc" Mar 20 16:17:33 crc kubenswrapper[4813]: E0320 16:17:33.837005 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="registry-server" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.837013 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="registry-server" Mar 20 16:17:33 crc kubenswrapper[4813]: E0320 16:17:33.837055 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="extract-utilities" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.837063 4813 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="extract-utilities" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.837271 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0" containerName="oc" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.837284 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="477634d9-2be3-4e60-8dfd-88f94651cf59" containerName="registry-server" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.838211 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.857264 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher854b-account-delete-r6twr"] Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.877566 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.877794 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="5081b10c-49a8-415e-bffa-ed620820005f" containerName="watcher-applier" containerID="cri-o://533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" gracePeriod=30 Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.935886 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-operator-scripts\") pod \"watcher854b-account-delete-r6twr\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.935969 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdnld\" (UniqueName: \"kubernetes.io/projected/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-kube-api-access-jdnld\") pod \"watcher854b-account-delete-r6twr\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.947771 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.947995 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="a6bcc686-f006-49f4-9451-8184fb2cce32" containerName="watcher-decision-engine" containerID="cri-o://99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" gracePeriod=30 Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.981431 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.985705 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-kuttl-api-log" containerID="cri-o://6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b" gracePeriod=30 Mar 20 16:17:33 crc kubenswrapper[4813]: I0320 16:17:33.985847 4813 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-api" containerID="cri-o://8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79" gracePeriod=30 Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.038950 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdnld\" (UniqueName: \"kubernetes.io/projected/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-kube-api-access-jdnld\") pod \"watcher854b-account-delete-r6twr\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.039815 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-operator-scripts\") pod \"watcher854b-account-delete-r6twr\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.040628 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-operator-scripts\") pod \"watcher854b-account-delete-r6twr\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.065470 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdnld\" (UniqueName: \"kubernetes.io/projected/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-kube-api-access-jdnld\") pod \"watcher854b-account-delete-r6twr\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.156840 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.748660 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher854b-account-delete-r6twr"] Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.928380 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" event={"ID":"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b","Type":"ContainerStarted","Data":"646e0ad93d77f50a1c2e1fc47c7b5896c1f701741ea000ccb3bde7bbf4bb67ea"} Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.931455 4813 generic.go:334] "Generic (PLEG): container finished" podID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerID="6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b" exitCode=143 Mar 20 16:17:34 crc kubenswrapper[4813]: I0320 16:17:34.931520 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"c6fc429d-e3c2-40bc-8146-107dbf8dc43e","Type":"ContainerDied","Data":"6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b"} Mar 20 16:17:35 crc kubenswrapper[4813]: E0320 16:17:35.280585 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:17:35 crc kubenswrapper[4813]: E0320 16:17:35.282249 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:17:35 crc kubenswrapper[4813]: E0320 16:17:35.283979 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:17:35 crc kubenswrapper[4813]: E0320 16:17:35.284020 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="5081b10c-49a8-415e-bffa-ed620820005f" containerName="watcher-applier" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.516210 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.565271 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-config-data\") pod \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.565340 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-822m9\" (UniqueName: \"kubernetes.io/projected/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-kube-api-access-822m9\") pod \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.565402 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-custom-prometheus-ca\") pod \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.565464 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-combined-ca-bundle\") pod \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.565523 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-logs\") pod \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\" (UID: \"c6fc429d-e3c2-40bc-8146-107dbf8dc43e\") " Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.566159 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-logs" (OuterVolumeSpecName: "logs") pod "c6fc429d-e3c2-40bc-8146-107dbf8dc43e" (UID: "c6fc429d-e3c2-40bc-8146-107dbf8dc43e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.592522 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-kube-api-access-822m9" (OuterVolumeSpecName: "kube-api-access-822m9") pod "c6fc429d-e3c2-40bc-8146-107dbf8dc43e" (UID: "c6fc429d-e3c2-40bc-8146-107dbf8dc43e"). InnerVolumeSpecName "kube-api-access-822m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.606665 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6fc429d-e3c2-40bc-8146-107dbf8dc43e" (UID: "c6fc429d-e3c2-40bc-8146-107dbf8dc43e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.606795 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "c6fc429d-e3c2-40bc-8146-107dbf8dc43e" (UID: "c6fc429d-e3c2-40bc-8146-107dbf8dc43e"). 
InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.624885 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-config-data" (OuterVolumeSpecName: "config-data") pod "c6fc429d-e3c2-40bc-8146-107dbf8dc43e" (UID: "c6fc429d-e3c2-40bc-8146-107dbf8dc43e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.666837 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.666878 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-822m9\" (UniqueName: \"kubernetes.io/projected/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-kube-api-access-822m9\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.666890 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.666899 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.666909 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6fc429d-e3c2-40bc-8146-107dbf8dc43e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.940012 4813 generic.go:334] "Generic (PLEG): container finished" podID="1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" containerID="41ecc9418f1c1817ac8bda9b30b0fe272103b0019c9a0f98c99f3c5c05a384e2" exitCode=0 Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.940086 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" event={"ID":"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b","Type":"ContainerDied","Data":"41ecc9418f1c1817ac8bda9b30b0fe272103b0019c9a0f98c99f3c5c05a384e2"} Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.942184 4813 generic.go:334] "Generic (PLEG): container finished" podID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerID="8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79" exitCode=0 Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.942226 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.942225 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"c6fc429d-e3c2-40bc-8146-107dbf8dc43e","Type":"ContainerDied","Data":"8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79"} Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.942348 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"c6fc429d-e3c2-40bc-8146-107dbf8dc43e","Type":"ContainerDied","Data":"850441be6332afff7ac7263a5b7d431e93eec1e37abf732e6e9ba16308678613"} Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.942373 4813 scope.go:117] "RemoveContainer" containerID="8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.971650 4813 scope.go:117] "RemoveContainer" containerID="6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.988657 4813 scope.go:117] "RemoveContainer" containerID="8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79" Mar 20 16:17:35 crc kubenswrapper[4813]: E0320 16:17:35.991833 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79\": container with ID starting with 8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79 not found: ID does not exist" containerID="8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.991900 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79"} err="failed to get container status \"8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79\": rpc error: code = NotFound desc = could not find container \"8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79\": container with ID starting with 8c28d03dccb45d67ede342020e9de2383470faec6eb7331369649db8883e5a79 not found: ID does not exist" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.991930 4813 scope.go:117] "RemoveContainer" containerID="6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b" Mar 20 16:17:35 crc kubenswrapper[4813]: E0320 16:17:35.992972 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b\": container with ID starting with 6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b not found: ID does not exist" containerID="6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b" Mar 20 16:17:35 crc kubenswrapper[4813]: I0320 16:17:35.993019 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b"} err="failed to get container status \"6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b\": rpc error: code = NotFound desc = could not find container \"6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b\": container with ID starting with 6d225a573c357d542923b02115949c0d2d21f1d507dc3c0a42b122998231576b not found: ID does not exist" Mar 20 16:17:36 crc 
kubenswrapper[4813]: I0320 16:17:36.008111 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.017189 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.633588 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.633914 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-central-agent" containerID="cri-o://cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45" gracePeriod=30 Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.634001 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-notification-agent" containerID="cri-o://47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4" gracePeriod=30 Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.634012 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="proxy-httpd" containerID="cri-o://25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d" gracePeriod=30 Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.634104 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="sg-core" containerID="cri-o://61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a" gracePeriod=30 Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.951797 4813 generic.go:334] "Generic (PLEG): container finished" podID="36a54386-f521-421f-8dd9-48393304cec4" containerID="25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d" exitCode=0 Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.951835 4813 generic.go:334] "Generic (PLEG): container finished" podID="36a54386-f521-421f-8dd9-48393304cec4" containerID="61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a" exitCode=2 Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.951862 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerDied","Data":"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d"} Mar 20 16:17:36 crc kubenswrapper[4813]: I0320 16:17:36.951905 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerDied","Data":"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a"} Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.268443 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.277308 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" path="/var/lib/kubelet/pods/c6fc429d-e3c2-40bc-8146-107dbf8dc43e/volumes" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.298560 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdnld\" (UniqueName: \"kubernetes.io/projected/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-kube-api-access-jdnld\") pod \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.298725 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-operator-scripts\") pod \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\" (UID: \"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.299787 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" (UID: "1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.316673 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-kube-api-access-jdnld" (OuterVolumeSpecName: "kube-api-access-jdnld") pod "1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" (UID: "1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b"). InnerVolumeSpecName "kube-api-access-jdnld". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.401162 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.401199 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdnld\" (UniqueName: \"kubernetes.io/projected/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b-kube-api-access-jdnld\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.750253 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911060 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-combined-ca-bundle\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911186 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-ceilometer-tls-certs\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911225 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-config-data\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911311 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-sg-core-conf-yaml\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911333 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-scripts\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911376 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-log-httpd\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911421 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgf4l\" (UniqueName: \"kubernetes.io/projected/36a54386-f521-421f-8dd9-48393304cec4-kube-api-access-mgf4l\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.911456 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-run-httpd\") pod \"36a54386-f521-421f-8dd9-48393304cec4\" (UID: \"36a54386-f521-421f-8dd9-48393304cec4\") " Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.912024 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.912139 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.929718 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36a54386-f521-421f-8dd9-48393304cec4-kube-api-access-mgf4l" (OuterVolumeSpecName: "kube-api-access-mgf4l") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "kube-api-access-mgf4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.931679 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-scripts" (OuterVolumeSpecName: "scripts") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.962630 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.973778 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" event={"ID":"1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b","Type":"ContainerDied","Data":"646e0ad93d77f50a1c2e1fc47c7b5896c1f701741ea000ccb3bde7bbf4bb67ea"} Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.973825 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="646e0ad93d77f50a1c2e1fc47c7b5896c1f701741ea000ccb3bde7bbf4bb67ea" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.973877 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher854b-account-delete-r6twr" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977516 4813 generic.go:334] "Generic (PLEG): container finished" podID="36a54386-f521-421f-8dd9-48393304cec4" containerID="47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4" exitCode=0 Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977547 4813 generic.go:334] "Generic (PLEG): container finished" podID="36a54386-f521-421f-8dd9-48393304cec4" containerID="cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45" exitCode=0 Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977571 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerDied","Data":"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4"} Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977600 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerDied","Data":"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45"} Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977610 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"36a54386-f521-421f-8dd9-48393304cec4","Type":"ContainerDied","Data":"edb84e850a811c15e23c94cedaac15e9fcdde318f3c6a1ba069c60c0da1900ff"} Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977627 4813 scope.go:117] "RemoveContainer" containerID="25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.977744 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:37 crc kubenswrapper[4813]: I0320 16:17:37.981600 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.014270 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.014338 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgf4l\" (UniqueName: \"kubernetes.io/projected/36a54386-f521-421f-8dd9-48393304cec4-kube-api-access-mgf4l\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.014351 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36a54386-f521-421f-8dd9-48393304cec4-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.014361 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.014411 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.014424 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.020643 4813 scope.go:117] "RemoveContainer" containerID="61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.083689 4813 scope.go:117] "RemoveContainer" containerID="47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.101347 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-config-data" (OuterVolumeSpecName: "config-data") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.115454 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.116028 4813 scope.go:117] "RemoveContainer" containerID="cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.116668 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36a54386-f521-421f-8dd9-48393304cec4" (UID: "36a54386-f521-421f-8dd9-48393304cec4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.170775 4813 scope.go:117] "RemoveContainer" containerID="25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.171295 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d\": container with ID starting with 25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d not found: ID does not exist" containerID="25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.171354 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d"} err="failed to get container status \"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d\": rpc error: code = NotFound desc = could not find container \"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d\": container with ID starting with 25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.171385 4813 scope.go:117] "RemoveContainer" containerID="61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.171781 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a\": container with ID starting with 61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a not found: ID does not exist" containerID="61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.171831 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a"} err="failed to get container status \"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a\": rpc error: code = NotFound desc = could not find container \"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a\": container with ID starting with 61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.171860 4813 scope.go:117] "RemoveContainer" containerID="47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.172169 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4\": container with ID starting with 47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4 not found: ID does not exist" containerID="47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.172204 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4"} err="failed to get container status \"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4\": rpc error: code = NotFound desc = could not 
find container \"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4\": container with ID starting with 47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4 not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.172223 4813 scope.go:117] "RemoveContainer" containerID="cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.172553 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45\": container with ID starting with cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45 not found: ID does not exist" containerID="cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.172600 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45"} err="failed to get container status \"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45\": rpc error: code = NotFound desc = could not find container \"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45\": container with ID starting with cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45 not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.172629 4813 scope.go:117] "RemoveContainer" containerID="25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.172923 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d"} err="failed to get container status \"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d\": rpc error: code = NotFound desc = could not find container \"25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d\": container with ID starting with 25547c51e63e19da74328357707f73ba227819897ed64557fab8c0a24468920d not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.172947 4813 scope.go:117] "RemoveContainer" containerID="61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.173208 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a"} err="failed to get container status \"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a\": rpc error: code = NotFound desc = could not find container \"61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a\": container with ID starting with 61d4acc664d8cc2a076dff17503dafb323e66684d8155b872a13e2cd83fb6f1a not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.173229 4813 scope.go:117] "RemoveContainer" containerID="47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.173699 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4"} err="failed to get container status \"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4\": rpc error: code = NotFound desc = could not 
find container \"47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4\": container with ID starting with 47ae15a99c4e9683a213346bc0220929879afd30f6977ccf48137bf5384e8de4 not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.173721 4813 scope.go:117] "RemoveContainer" containerID="cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.173968 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45"} err="failed to get container status \"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45\": rpc error: code = NotFound desc = could not find container \"cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45\": container with ID starting with cde7213539f4fb5ee5cde3c1715c07dad189edef0334407a0eb2866ba61b5b45 not found: ID does not exist" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.216948 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36a54386-f521-421f-8dd9-48393304cec4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.242190 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05 is running failed: container process not found" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.244546 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05 is running failed: container process not found" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.244910 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05 is running failed: container process not found" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.244943 4813 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05 is running failed: container process not found" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="a6bcc686-f006-49f4-9451-8184fb2cce32" containerName="watcher-decision-engine" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.321552 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.329116 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.344997 4813 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345377 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-kuttl-api-log" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345391 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-kuttl-api-log" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345406 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="sg-core" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345414 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="sg-core" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345426 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-central-agent" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345434 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-central-agent" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345444 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-notification-agent" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345451 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-notification-agent" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345464 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="proxy-httpd" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345474 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="proxy-httpd" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345500 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" containerName="mariadb-account-delete" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345508 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" containerName="mariadb-account-delete" Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.345524 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-api" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345531 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-api" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345723 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" containerName="mariadb-account-delete" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345738 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-central-agent" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345752 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-kuttl-api-log" Mar 20 16:17:38 
crc kubenswrapper[4813]: I0320 16:17:38.345766 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="sg-core" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345778 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="proxy-httpd" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345789 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="36a54386-f521-421f-8dd9-48393304cec4" containerName="ceilometer-notification-agent" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.345801 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6fc429d-e3c2-40bc-8146-107dbf8dc43e" containerName="watcher-api" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.347497 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.352922 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.354411 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.354714 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.366777 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521220 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-run-httpd\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521269 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521298 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-scripts\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521315 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-log-httpd\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521342 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " 
pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521381 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521433 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2qpk\" (UniqueName: \"kubernetes.io/projected/ecc73847-6525-4f9a-8323-334a1b7eec64-kube-api-access-x2qpk\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.521504 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-config-data\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.588127 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623304 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-config-data\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623396 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-run-httpd\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623438 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623469 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-scripts\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623516 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-log-httpd\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623554 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623578 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.623614 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2qpk\" (UniqueName: \"kubernetes.io/projected/ecc73847-6525-4f9a-8323-334a1b7eec64-kube-api-access-x2qpk\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.624383 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-log-httpd\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.624989 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-run-httpd\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.642287 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.642951 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-scripts\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.647454 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.648071 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-config-data\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.648221 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.651570 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2qpk\" (UniqueName: 
\"kubernetes.io/projected/ecc73847-6525-4f9a-8323-334a1b7eec64-kube-api-access-x2qpk\") pod \"ceilometer-0\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.668965 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.725815 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-custom-prometheus-ca\") pod \"a6bcc686-f006-49f4-9451-8184fb2cce32\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.726224 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-config-data\") pod \"a6bcc686-f006-49f4-9451-8184fb2cce32\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.726263 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn4m2\" (UniqueName: \"kubernetes.io/projected/a6bcc686-f006-49f4-9451-8184fb2cce32-kube-api-access-tn4m2\") pod \"a6bcc686-f006-49f4-9451-8184fb2cce32\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.726291 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6bcc686-f006-49f4-9451-8184fb2cce32-logs\") pod \"a6bcc686-f006-49f4-9451-8184fb2cce32\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.726313 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-combined-ca-bundle\") pod \"a6bcc686-f006-49f4-9451-8184fb2cce32\" (UID: \"a6bcc686-f006-49f4-9451-8184fb2cce32\") " Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.727984 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6bcc686-f006-49f4-9451-8184fb2cce32-logs" (OuterVolumeSpecName: "logs") pod "a6bcc686-f006-49f4-9451-8184fb2cce32" (UID: "a6bcc686-f006-49f4-9451-8184fb2cce32"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.731530 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6bcc686-f006-49f4-9451-8184fb2cce32-kube-api-access-tn4m2" (OuterVolumeSpecName: "kube-api-access-tn4m2") pod "a6bcc686-f006-49f4-9451-8184fb2cce32" (UID: "a6bcc686-f006-49f4-9451-8184fb2cce32"). InnerVolumeSpecName "kube-api-access-tn4m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.746182 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "a6bcc686-f006-49f4-9451-8184fb2cce32" (UID: "a6bcc686-f006-49f4-9451-8184fb2cce32"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.753338 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6bcc686-f006-49f4-9451-8184fb2cce32" (UID: "a6bcc686-f006-49f4-9451-8184fb2cce32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.789688 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-config-data" (OuterVolumeSpecName: "config-data") pod "a6bcc686-f006-49f4-9451-8184fb2cce32" (UID: "a6bcc686-f006-49f4-9451-8184fb2cce32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.831539 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.831564 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.831574 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn4m2\" (UniqueName: \"kubernetes.io/projected/a6bcc686-f006-49f4-9451-8184fb2cce32-kube-api-access-tn4m2\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.831585 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6bcc686-f006-49f4-9451-8184fb2cce32-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.831599 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6bcc686-f006-49f4-9451-8184fb2cce32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.924560 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher854b-account-delete-r6twr"] Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.933211 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher854b-account-delete-r6twr"] Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.984414 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-qnpth"] Mar 20 16:17:38 crc kubenswrapper[4813]: E0320 16:17:38.984860 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6bcc686-f006-49f4-9451-8184fb2cce32" containerName="watcher-decision-engine" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.984882 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6bcc686-f006-49f4-9451-8184fb2cce32" containerName="watcher-decision-engine" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.985343 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6bcc686-f006-49f4-9451-8184fb2cce32" containerName="watcher-decision-engine" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.986175 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:38 crc kubenswrapper[4813]: I0320 16:17:38.999723 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qnpth"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.009224 4813 generic.go:334] "Generic (PLEG): container finished" podID="a6bcc686-f006-49f4-9451-8184fb2cce32" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" exitCode=0 Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.009302 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"a6bcc686-f006-49f4-9451-8184fb2cce32","Type":"ContainerDied","Data":"99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05"} Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.014009 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.016728 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"a6bcc686-f006-49f4-9451-8184fb2cce32","Type":"ContainerDied","Data":"c8bf4fa344c0f66589051dd309f1429d51de3dca524131661601bcb098a0bc99"} Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.016783 4813 scope.go:117] "RemoveContainer" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.033100 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/463e1938-e4b8-4ac9-b3cb-f149330a1efa-operator-scripts\") pod \"watcher-db-create-qnpth\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.033143 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsvn8\" (UniqueName: \"kubernetes.io/projected/463e1938-e4b8-4ac9-b3cb-f149330a1efa-kube-api-access-nsvn8\") pod \"watcher-db-create-qnpth\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.040134 4813 scope.go:117] "RemoveContainer" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" Mar 20 16:17:39 crc kubenswrapper[4813]: E0320 16:17:39.040584 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05\": container with ID starting with 99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05 not found: ID does not exist" containerID="99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.040627 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05"} err="failed to get container status \"99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05\": rpc error: code = NotFound desc = could not find container \"99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05\": container with ID starting with 
99c947296ca2598813480d3242069e61cf3046d3750913a86c3a333580d97c05 not found: ID does not exist" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.074832 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.086051 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.103351 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-a660-account-create-update-9vc6b"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.105257 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.111751 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.118158 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-a660-account-create-update-9vc6b"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.137033 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/463e1938-e4b8-4ac9-b3cb-f149330a1efa-operator-scripts\") pod \"watcher-db-create-qnpth\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.137110 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsvn8\" (UniqueName: \"kubernetes.io/projected/463e1938-e4b8-4ac9-b3cb-f149330a1efa-kube-api-access-nsvn8\") pod \"watcher-db-create-qnpth\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.146858 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/463e1938-e4b8-4ac9-b3cb-f149330a1efa-operator-scripts\") pod \"watcher-db-create-qnpth\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.158418 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.171868 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsvn8\" (UniqueName: \"kubernetes.io/projected/463e1938-e4b8-4ac9-b3cb-f149330a1efa-kube-api-access-nsvn8\") pod \"watcher-db-create-qnpth\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.242749 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hcjc\" (UniqueName: \"kubernetes.io/projected/04bba761-d948-40dd-8c46-b1b8327d7fb4-kube-api-access-7hcjc\") pod \"watcher-a660-account-create-update-9vc6b\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.242835 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04bba761-d948-40dd-8c46-b1b8327d7fb4-operator-scripts\") pod \"watcher-a660-account-create-update-9vc6b\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.290230 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b" path="/var/lib/kubelet/pods/1c61a2fc-9151-4fa2-aa2a-0ce18fe68c7b/volumes" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.290888 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36a54386-f521-421f-8dd9-48393304cec4" path="/var/lib/kubelet/pods/36a54386-f521-421f-8dd9-48393304cec4/volumes" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.291978 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6bcc686-f006-49f4-9451-8184fb2cce32" path="/var/lib/kubelet/pods/a6bcc686-f006-49f4-9451-8184fb2cce32/volumes" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.309268 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.344307 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hcjc\" (UniqueName: \"kubernetes.io/projected/04bba761-d948-40dd-8c46-b1b8327d7fb4-kube-api-access-7hcjc\") pod \"watcher-a660-account-create-update-9vc6b\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.344378 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04bba761-d948-40dd-8c46-b1b8327d7fb4-operator-scripts\") pod \"watcher-a660-account-create-update-9vc6b\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.346323 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04bba761-d948-40dd-8c46-b1b8327d7fb4-operator-scripts\") pod \"watcher-a660-account-create-update-9vc6b\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.365597 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hcjc\" (UniqueName: \"kubernetes.io/projected/04bba761-d948-40dd-8c46-b1b8327d7fb4-kube-api-access-7hcjc\") pod \"watcher-a660-account-create-update-9vc6b\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.436800 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.457136 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.648948 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-combined-ca-bundle\") pod \"5081b10c-49a8-415e-bffa-ed620820005f\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.649042 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-config-data\") pod \"5081b10c-49a8-415e-bffa-ed620820005f\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.649175 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r857r\" (UniqueName: \"kubernetes.io/projected/5081b10c-49a8-415e-bffa-ed620820005f-kube-api-access-r857r\") pod \"5081b10c-49a8-415e-bffa-ed620820005f\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.649306 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5081b10c-49a8-415e-bffa-ed620820005f-logs\") pod \"5081b10c-49a8-415e-bffa-ed620820005f\" (UID: \"5081b10c-49a8-415e-bffa-ed620820005f\") " Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.657916 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5081b10c-49a8-415e-bffa-ed620820005f-logs" (OuterVolumeSpecName: "logs") pod "5081b10c-49a8-415e-bffa-ed620820005f" (UID: "5081b10c-49a8-415e-bffa-ed620820005f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.659624 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5081b10c-49a8-415e-bffa-ed620820005f-kube-api-access-r857r" (OuterVolumeSpecName: "kube-api-access-r857r") pod "5081b10c-49a8-415e-bffa-ed620820005f" (UID: "5081b10c-49a8-415e-bffa-ed620820005f"). InnerVolumeSpecName "kube-api-access-r857r". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.661665 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qnpth"] Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.682914 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5081b10c-49a8-415e-bffa-ed620820005f" (UID: "5081b10c-49a8-415e-bffa-ed620820005f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.702767 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-config-data" (OuterVolumeSpecName: "config-data") pod "5081b10c-49a8-415e-bffa-ed620820005f" (UID: "5081b10c-49a8-415e-bffa-ed620820005f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.751738 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r857r\" (UniqueName: \"kubernetes.io/projected/5081b10c-49a8-415e-bffa-ed620820005f-kube-api-access-r857r\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.751772 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5081b10c-49a8-415e-bffa-ed620820005f-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.751782 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.751792 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5081b10c-49a8-415e-bffa-ed620820005f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:39 crc kubenswrapper[4813]: I0320 16:17:39.964759 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-a660-account-create-update-9vc6b"] Mar 20 16:17:39 crc kubenswrapper[4813]: W0320 16:17:39.969224 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04bba761_d948_40dd_8c46_b1b8327d7fb4.slice/crio-9a47089f863ff267963e41cf66c48a55486d08858959861af25a2fbc467927fa WatchSource:0}: Error finding container 9a47089f863ff267963e41cf66c48a55486d08858959861af25a2fbc467927fa: Status 404 returned error can't find the container with id 9a47089f863ff267963e41cf66c48a55486d08858959861af25a2fbc467927fa Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.032634 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerStarted","Data":"ed2cfb6307e9400e472bb86fad908b038b284e6c9fbd17215034982da61fe4b3"} Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.034281 4813 generic.go:334] "Generic (PLEG): container finished" podID="5081b10c-49a8-415e-bffa-ed620820005f" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" exitCode=0 Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.034329 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5081b10c-49a8-415e-bffa-ed620820005f","Type":"ContainerDied","Data":"533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1"} Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.034345 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5081b10c-49a8-415e-bffa-ed620820005f","Type":"ContainerDied","Data":"0192af7b2ec3a3468cb3ada4ace12e901d67cfda4086626b7364fbde130b38b2"} Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.034370 4813 scope.go:117] "RemoveContainer" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.034497 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.036172 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" event={"ID":"04bba761-d948-40dd-8c46-b1b8327d7fb4","Type":"ContainerStarted","Data":"9a47089f863ff267963e41cf66c48a55486d08858959861af25a2fbc467927fa"} Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.041495 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qnpth" event={"ID":"463e1938-e4b8-4ac9-b3cb-f149330a1efa","Type":"ContainerStarted","Data":"a327420812e82c312a90374645a54160d24b34b0040c2e72a092d1f44fadc725"} Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.041554 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qnpth" event={"ID":"463e1938-e4b8-4ac9-b3cb-f149330a1efa","Type":"ContainerStarted","Data":"91cd7ea59416d853258a9b5c67ebffb18cb47bb31c2e31d2eba000b4fb24cd72"} Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.064223 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-db-create-qnpth" podStartSLOduration=2.064202005 podStartE2EDuration="2.064202005s" podCreationTimestamp="2026-03-20 16:17:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:17:40.060270759 +0000 UTC m=+2389.482973610" watchObservedRunningTime="2026-03-20 16:17:40.064202005 +0000 UTC m=+2389.486904846" Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.078235 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.088509 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.093009 4813 scope.go:117] "RemoveContainer" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" Mar 20 16:17:40 crc kubenswrapper[4813]: E0320 16:17:40.095931 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1\": container with ID starting with 533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1 not found: ID does not exist" containerID="533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1" Mar 20 16:17:40 crc kubenswrapper[4813]: I0320 16:17:40.095995 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1"} err="failed to get container status \"533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1\": rpc error: code = NotFound desc = could not find container \"533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1\": container with ID starting with 533482a60f414cfd716927a6e4bff69fda9d3b69a99a5b25243ef532822458b1 not found: ID does not exist" Mar 20 16:17:41 crc kubenswrapper[4813]: I0320 16:17:41.086615 4813 generic.go:334] "Generic (PLEG): container finished" podID="04bba761-d948-40dd-8c46-b1b8327d7fb4" containerID="cc83545d0b4e417d30fcc50653d472fe3467a72515f2ec6f869451ef735e247e" exitCode=0 Mar 20 16:17:41 crc kubenswrapper[4813]: I0320 16:17:41.088052 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" event={"ID":"04bba761-d948-40dd-8c46-b1b8327d7fb4","Type":"ContainerDied","Data":"cc83545d0b4e417d30fcc50653d472fe3467a72515f2ec6f869451ef735e247e"} Mar 20 16:17:41 crc kubenswrapper[4813]: I0320 16:17:41.092540 4813 generic.go:334] "Generic (PLEG): container finished" podID="463e1938-e4b8-4ac9-b3cb-f149330a1efa" containerID="a327420812e82c312a90374645a54160d24b34b0040c2e72a092d1f44fadc725" exitCode=0 Mar 20 16:17:41 crc kubenswrapper[4813]: I0320 16:17:41.092614 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qnpth" event={"ID":"463e1938-e4b8-4ac9-b3cb-f149330a1efa","Type":"ContainerDied","Data":"a327420812e82c312a90374645a54160d24b34b0040c2e72a092d1f44fadc725"} Mar 20 16:17:41 crc kubenswrapper[4813]: I0320 16:17:41.105093 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerStarted","Data":"16f85b099c64bd806975d1ed66076ec2817d286749eaa5e0a12d43d5ac321273"} Mar 20 16:17:41 crc kubenswrapper[4813]: I0320 16:17:41.278322 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5081b10c-49a8-415e-bffa-ed620820005f" path="/var/lib/kubelet/pods/5081b10c-49a8-415e-bffa-ed620820005f/volumes" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.117602 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerStarted","Data":"c3ecf7c79f34b497b4fae7ea66fe7c381b82d74811b1f5c93a737233f17a2e12"} Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.117953 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerStarted","Data":"8bdc6016a55d55ea5f6f209fc1c2d578a2b162f8155afd0cd188348715ca00d6"} Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.456885 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.487688 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.593848 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsvn8\" (UniqueName: \"kubernetes.io/projected/463e1938-e4b8-4ac9-b3cb-f149330a1efa-kube-api-access-nsvn8\") pod \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.594175 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hcjc\" (UniqueName: \"kubernetes.io/projected/04bba761-d948-40dd-8c46-b1b8327d7fb4-kube-api-access-7hcjc\") pod \"04bba761-d948-40dd-8c46-b1b8327d7fb4\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.594269 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/463e1938-e4b8-4ac9-b3cb-f149330a1efa-operator-scripts\") pod \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\" (UID: \"463e1938-e4b8-4ac9-b3cb-f149330a1efa\") " Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.594328 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04bba761-d948-40dd-8c46-b1b8327d7fb4-operator-scripts\") pod \"04bba761-d948-40dd-8c46-b1b8327d7fb4\" (UID: \"04bba761-d948-40dd-8c46-b1b8327d7fb4\") " Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.595261 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04bba761-d948-40dd-8c46-b1b8327d7fb4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04bba761-d948-40dd-8c46-b1b8327d7fb4" (UID: "04bba761-d948-40dd-8c46-b1b8327d7fb4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.595540 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/463e1938-e4b8-4ac9-b3cb-f149330a1efa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "463e1938-e4b8-4ac9-b3cb-f149330a1efa" (UID: "463e1938-e4b8-4ac9-b3cb-f149330a1efa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.598897 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/463e1938-e4b8-4ac9-b3cb-f149330a1efa-kube-api-access-nsvn8" (OuterVolumeSpecName: "kube-api-access-nsvn8") pod "463e1938-e4b8-4ac9-b3cb-f149330a1efa" (UID: "463e1938-e4b8-4ac9-b3cb-f149330a1efa"). InnerVolumeSpecName "kube-api-access-nsvn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.599179 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04bba761-d948-40dd-8c46-b1b8327d7fb4-kube-api-access-7hcjc" (OuterVolumeSpecName: "kube-api-access-7hcjc") pod "04bba761-d948-40dd-8c46-b1b8327d7fb4" (UID: "04bba761-d948-40dd-8c46-b1b8327d7fb4"). InnerVolumeSpecName "kube-api-access-7hcjc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.695670 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsvn8\" (UniqueName: \"kubernetes.io/projected/463e1938-e4b8-4ac9-b3cb-f149330a1efa-kube-api-access-nsvn8\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.695718 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hcjc\" (UniqueName: \"kubernetes.io/projected/04bba761-d948-40dd-8c46-b1b8327d7fb4-kube-api-access-7hcjc\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.695732 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/463e1938-e4b8-4ac9-b3cb-f149330a1efa-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:42 crc kubenswrapper[4813]: I0320 16:17:42.695742 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04bba761-d948-40dd-8c46-b1b8327d7fb4-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:43 crc kubenswrapper[4813]: I0320 16:17:43.127906 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" Mar 20 16:17:43 crc kubenswrapper[4813]: I0320 16:17:43.127920 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-a660-account-create-update-9vc6b" event={"ID":"04bba761-d948-40dd-8c46-b1b8327d7fb4","Type":"ContainerDied","Data":"9a47089f863ff267963e41cf66c48a55486d08858959861af25a2fbc467927fa"} Mar 20 16:17:43 crc kubenswrapper[4813]: I0320 16:17:43.127963 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a47089f863ff267963e41cf66c48a55486d08858959861af25a2fbc467927fa" Mar 20 16:17:43 crc kubenswrapper[4813]: I0320 16:17:43.129992 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qnpth" event={"ID":"463e1938-e4b8-4ac9-b3cb-f149330a1efa","Type":"ContainerDied","Data":"91cd7ea59416d853258a9b5c67ebffb18cb47bb31c2e31d2eba000b4fb24cd72"} Mar 20 16:17:43 crc kubenswrapper[4813]: I0320 16:17:43.130026 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91cd7ea59416d853258a9b5c67ebffb18cb47bb31c2e31d2eba000b4fb24cd72" Mar 20 16:17:43 crc kubenswrapper[4813]: I0320 16:17:43.130067 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qnpth" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.144773 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerStarted","Data":"be153c0d60bc666aba884582a3c4ef6446568e61098e5d4197a4bfff80e24a64"} Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.145128 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.176642 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.7949899519999999 podStartE2EDuration="6.176617873s" podCreationTimestamp="2026-03-20 16:17:38 +0000 UTC" firstStartedPulling="2026-03-20 16:17:39.198374798 +0000 UTC m=+2388.621077639" lastFinishedPulling="2026-03-20 16:17:43.580002719 +0000 UTC m=+2393.002705560" observedRunningTime="2026-03-20 16:17:44.168103584 +0000 UTC m=+2393.590806425" watchObservedRunningTime="2026-03-20 16:17:44.176617873 +0000 UTC m=+2393.599320714" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.434645 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh"] Mar 20 16:17:44 crc kubenswrapper[4813]: E0320 16:17:44.435094 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5081b10c-49a8-415e-bffa-ed620820005f" containerName="watcher-applier" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.435119 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5081b10c-49a8-415e-bffa-ed620820005f" containerName="watcher-applier" Mar 20 16:17:44 crc kubenswrapper[4813]: E0320 16:17:44.435156 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="463e1938-e4b8-4ac9-b3cb-f149330a1efa" containerName="mariadb-database-create" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.435165 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="463e1938-e4b8-4ac9-b3cb-f149330a1efa" containerName="mariadb-database-create" Mar 20 16:17:44 crc kubenswrapper[4813]: E0320 16:17:44.435182 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04bba761-d948-40dd-8c46-b1b8327d7fb4" containerName="mariadb-account-create-update" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.435190 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="04bba761-d948-40dd-8c46-b1b8327d7fb4" containerName="mariadb-account-create-update" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.435403 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="463e1938-e4b8-4ac9-b3cb-f149330a1efa" containerName="mariadb-database-create" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.435423 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5081b10c-49a8-415e-bffa-ed620820005f" containerName="watcher-applier" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.435448 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="04bba761-d948-40dd-8c46-b1b8327d7fb4" containerName="mariadb-account-create-update" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.436181 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.438131 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-n2pzs" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.438178 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.449230 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh"] Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.529299 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvjk5\" (UniqueName: \"kubernetes.io/projected/a8432d14-de6c-4396-8854-8411787cbfdf-kube-api-access-pvjk5\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.529355 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.529399 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-config-data\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.529618 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-db-sync-config-data\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.630723 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-db-sync-config-data\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.630816 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvjk5\" (UniqueName: \"kubernetes.io/projected/a8432d14-de6c-4396-8854-8411787cbfdf-kube-api-access-pvjk5\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.630846 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 
16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.630871 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-config-data\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.634931 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-db-sync-config-data\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.635643 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.638156 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-config-data\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.646778 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvjk5\" (UniqueName: \"kubernetes.io/projected/a8432d14-de6c-4396-8854-8411787cbfdf-kube-api-access-pvjk5\") pod \"watcher-kuttl-db-sync-vp6nh\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:44 crc kubenswrapper[4813]: I0320 16:17:44.800378 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:45 crc kubenswrapper[4813]: I0320 16:17:45.338452 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh"] Mar 20 16:17:46 crc kubenswrapper[4813]: I0320 16:17:46.165951 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" event={"ID":"a8432d14-de6c-4396-8854-8411787cbfdf","Type":"ContainerStarted","Data":"b6684b5755c588d5eb87b616c9d881a382ef66d3a3f30a13dd2affca42917ffa"} Mar 20 16:17:46 crc kubenswrapper[4813]: I0320 16:17:46.166334 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" event={"ID":"a8432d14-de6c-4396-8854-8411787cbfdf","Type":"ContainerStarted","Data":"274be982ad8544900692234f85c6e21b4d5610292d9ca0b2a5936f5c50aa4088"} Mar 20 16:17:46 crc kubenswrapper[4813]: I0320 16:17:46.209092 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" podStartSLOduration=2.209070411 podStartE2EDuration="2.209070411s" podCreationTimestamp="2026-03-20 16:17:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:17:46.204287262 +0000 UTC m=+2395.626990103" watchObservedRunningTime="2026-03-20 16:17:46.209070411 +0000 UTC m=+2395.631773252" Mar 20 16:17:48 crc kubenswrapper[4813]: I0320 16:17:48.185617 4813 generic.go:334] "Generic (PLEG): container finished" podID="a8432d14-de6c-4396-8854-8411787cbfdf" containerID="b6684b5755c588d5eb87b616c9d881a382ef66d3a3f30a13dd2affca42917ffa" exitCode=0 Mar 20 16:17:48 crc kubenswrapper[4813]: I0320 16:17:48.185677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" event={"ID":"a8432d14-de6c-4396-8854-8411787cbfdf","Type":"ContainerDied","Data":"b6684b5755c588d5eb87b616c9d881a382ef66d3a3f30a13dd2affca42917ffa"} Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.594173 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.721863 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-config-data\") pod \"a8432d14-de6c-4396-8854-8411787cbfdf\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.722053 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvjk5\" (UniqueName: \"kubernetes.io/projected/a8432d14-de6c-4396-8854-8411787cbfdf-kube-api-access-pvjk5\") pod \"a8432d14-de6c-4396-8854-8411787cbfdf\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.722092 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-combined-ca-bundle\") pod \"a8432d14-de6c-4396-8854-8411787cbfdf\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.722149 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-db-sync-config-data\") pod \"a8432d14-de6c-4396-8854-8411787cbfdf\" (UID: \"a8432d14-de6c-4396-8854-8411787cbfdf\") " Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.736681 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a8432d14-de6c-4396-8854-8411787cbfdf" (UID: "a8432d14-de6c-4396-8854-8411787cbfdf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.746693 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8432d14-de6c-4396-8854-8411787cbfdf-kube-api-access-pvjk5" (OuterVolumeSpecName: "kube-api-access-pvjk5") pod "a8432d14-de6c-4396-8854-8411787cbfdf" (UID: "a8432d14-de6c-4396-8854-8411787cbfdf"). InnerVolumeSpecName "kube-api-access-pvjk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.777710 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8432d14-de6c-4396-8854-8411787cbfdf" (UID: "a8432d14-de6c-4396-8854-8411787cbfdf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.806157 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-config-data" (OuterVolumeSpecName: "config-data") pod "a8432d14-de6c-4396-8854-8411787cbfdf" (UID: "a8432d14-de6c-4396-8854-8411787cbfdf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.823991 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.824233 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvjk5\" (UniqueName: \"kubernetes.io/projected/a8432d14-de6c-4396-8854-8411787cbfdf-kube-api-access-pvjk5\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.824301 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:49 crc kubenswrapper[4813]: I0320 16:17:49.824366 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8432d14-de6c-4396-8854-8411787cbfdf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.204878 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" event={"ID":"a8432d14-de6c-4396-8854-8411787cbfdf","Type":"ContainerDied","Data":"274be982ad8544900692234f85c6e21b4d5610292d9ca0b2a5936f5c50aa4088"} Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.204924 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="274be982ad8544900692234f85c6e21b4d5610292d9ca0b2a5936f5c50aa4088" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.204986 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.982804 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:17:50 crc kubenswrapper[4813]: E0320 16:17:50.983104 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8432d14-de6c-4396-8854-8411787cbfdf" containerName="watcher-kuttl-db-sync" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.983117 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8432d14-de6c-4396-8854-8411787cbfdf" containerName="watcher-kuttl-db-sync" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.983247 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8432d14-de6c-4396-8854-8411787cbfdf" containerName="watcher-kuttl-db-sync" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.984042 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.985939 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-n2pzs" Mar 20 16:17:50 crc kubenswrapper[4813]: I0320 16:17:50.986613 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.005519 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.047833 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.049234 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.050634 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.063288 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.064407 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.069027 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.090360 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.107957 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143132 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143191 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143215 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143233 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: 
\"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143268 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr7kp\" (UniqueName: \"kubernetes.io/projected/b5b66c98-4310-4442-a9a6-25a828d1c1cc-kube-api-access-cr7kp\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143302 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143339 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b66c98-4310-4442-a9a6-25a828d1c1cc-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143381 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29b5cee9-5160-4f31-a9ab-733e1ef673ba-logs\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.143396 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnz8h\" (UniqueName: \"kubernetes.io/projected/29b5cee9-5160-4f31-a9ab-733e1ef673ba-kube-api-access-lnz8h\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.244865 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.244925 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.244953 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.244983 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr7kp\" (UniqueName: \"kubernetes.io/projected/b5b66c98-4310-4442-a9a6-25a828d1c1cc-kube-api-access-cr7kp\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245030 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b66c98-4310-4442-a9a6-25a828d1c1cc-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245084 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245121 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29b5cee9-5160-4f31-a9ab-733e1ef673ba-logs\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245152 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnz8h\" (UniqueName: \"kubernetes.io/projected/29b5cee9-5160-4f31-a9ab-733e1ef673ba-kube-api-access-lnz8h\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245201 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245237 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245277 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245320 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245348 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lkps\" (UniqueName: \"kubernetes.io/projected/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-kube-api-access-9lkps\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245800 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b66c98-4310-4442-a9a6-25a828d1c1cc-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.245897 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29b5cee9-5160-4f31-a9ab-733e1ef673ba-logs\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.250290 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.251535 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.251763 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.285954 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.286079 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.286307 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.289615 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnz8h\" (UniqueName: \"kubernetes.io/projected/29b5cee9-5160-4f31-a9ab-733e1ef673ba-kube-api-access-lnz8h\") pod \"watcher-kuttl-api-0\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.300264 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr7kp\" (UniqueName: \"kubernetes.io/projected/b5b66c98-4310-4442-a9a6-25a828d1c1cc-kube-api-access-cr7kp\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.301106 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.346403 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lkps\" (UniqueName: \"kubernetes.io/projected/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-kube-api-access-9lkps\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.346567 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.346605 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.346637 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.347007 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") 
" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.351191 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.356087 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.364377 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lkps\" (UniqueName: \"kubernetes.io/projected/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-kube-api-access-9lkps\") pod \"watcher-kuttl-applier-0\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.366189 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.385299 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.798638 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: W0320 16:17:51.905523 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71dbbcff_6731_4ec5_8f1a_7233a3d105bc.slice/crio-9605d374c89b47be11501a9e9d5993b36056b63278b5506c506aa0ea20320df1 WatchSource:0}: Error finding container 9605d374c89b47be11501a9e9d5993b36056b63278b5506c506aa0ea20320df1: Status 404 returned error can't find the container with id 9605d374c89b47be11501a9e9d5993b36056b63278b5506c506aa0ea20320df1 Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.912712 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:17:51 crc kubenswrapper[4813]: I0320 16:17:51.919273 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.218918 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"b5b66c98-4310-4442-a9a6-25a828d1c1cc","Type":"ContainerStarted","Data":"7c008e4b8cde5be1afc5c20235c3cb8c2a460538f7cbebdbbbaeb7e54c2763e8"} Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.219250 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"b5b66c98-4310-4442-a9a6-25a828d1c1cc","Type":"ContainerStarted","Data":"de7e473d4bc02abb27368ae0540f5690f237483d9c50aa87b117de1664330642"} Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.220283 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" 
event={"ID":"29b5cee9-5160-4f31-a9ab-733e1ef673ba","Type":"ContainerStarted","Data":"3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda"} Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.220308 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"29b5cee9-5160-4f31-a9ab-733e1ef673ba","Type":"ContainerStarted","Data":"1c082bbdb1a379e2b199598c0b3c29753b4c95e70d47d0b4697bf05fbdf49797"} Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.222525 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"71dbbcff-6731-4ec5-8f1a-7233a3d105bc","Type":"ContainerStarted","Data":"fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67"} Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.222552 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"71dbbcff-6731-4ec5-8f1a-7233a3d105bc","Type":"ContainerStarted","Data":"9605d374c89b47be11501a9e9d5993b36056b63278b5506c506aa0ea20320df1"} Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.248962 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.248948381 podStartE2EDuration="2.248948381s" podCreationTimestamp="2026-03-20 16:17:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:17:52.248292224 +0000 UTC m=+2401.670995075" watchObservedRunningTime="2026-03-20 16:17:52.248948381 +0000 UTC m=+2401.671651222" Mar 20 16:17:52 crc kubenswrapper[4813]: I0320 16:17:52.273971 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=1.273957356 podStartE2EDuration="1.273957356s" podCreationTimestamp="2026-03-20 16:17:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:17:52.273129124 +0000 UTC m=+2401.695831965" watchObservedRunningTime="2026-03-20 16:17:52.273957356 +0000 UTC m=+2401.696660197" Mar 20 16:17:53 crc kubenswrapper[4813]: I0320 16:17:53.232005 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"29b5cee9-5160-4f31-a9ab-733e1ef673ba","Type":"ContainerStarted","Data":"80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22"} Mar 20 16:17:53 crc kubenswrapper[4813]: I0320 16:17:53.232747 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:53 crc kubenswrapper[4813]: I0320 16:17:53.257361 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=3.2573401840000002 podStartE2EDuration="3.257340184s" podCreationTimestamp="2026-03-20 16:17:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:17:53.251609599 +0000 UTC m=+2402.674312440" watchObservedRunningTime="2026-03-20 16:17:53.257340184 +0000 UTC m=+2402.680043025" Mar 20 16:17:55 crc kubenswrapper[4813]: I0320 16:17:55.261276 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 16:17:55 crc kubenswrapper[4813]: I0320 
16:17:55.496421 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:56 crc kubenswrapper[4813]: I0320 16:17:56.301283 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:17:56 crc kubenswrapper[4813]: I0320 16:17:56.386400 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.158720 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567058-nsmxd"] Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.160536 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.163993 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.164176 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.164718 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.166465 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567058-nsmxd"] Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.299349 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs64q\" (UniqueName: \"kubernetes.io/projected/8b36cb93-16d8-40e3-bff6-45a70579002e-kube-api-access-cs64q\") pod \"auto-csr-approver-29567058-nsmxd\" (UID: \"8b36cb93-16d8-40e3-bff6-45a70579002e\") " pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.400322 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs64q\" (UniqueName: \"kubernetes.io/projected/8b36cb93-16d8-40e3-bff6-45a70579002e-kube-api-access-cs64q\") pod \"auto-csr-approver-29567058-nsmxd\" (UID: \"8b36cb93-16d8-40e3-bff6-45a70579002e\") " pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.418808 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs64q\" (UniqueName: \"kubernetes.io/projected/8b36cb93-16d8-40e3-bff6-45a70579002e-kube-api-access-cs64q\") pod \"auto-csr-approver-29567058-nsmxd\" (UID: \"8b36cb93-16d8-40e3-bff6-45a70579002e\") " pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.485141 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:00 crc kubenswrapper[4813]: I0320 16:18:00.928017 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567058-nsmxd"] Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.302559 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.310974 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" event={"ID":"8b36cb93-16d8-40e3-bff6-45a70579002e","Type":"ContainerStarted","Data":"a9701cc6e43a8a3e7871e52884bfee5addfd75546e051fd62eac27e95a0bce36"} Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.314807 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.322983 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.366736 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.386472 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.401078 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:01 crc kubenswrapper[4813]: I0320 16:18:01.430621 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:02 crc kubenswrapper[4813]: I0320 16:18:02.321187 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" event={"ID":"8b36cb93-16d8-40e3-bff6-45a70579002e","Type":"ContainerStarted","Data":"f6696a828ef2dc7ee0b5ea8697e1eed6d4d52c01afa0ad0ec2b23b1c7dc77382"} Mar 20 16:18:02 crc kubenswrapper[4813]: I0320 16:18:02.322228 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:02 crc kubenswrapper[4813]: I0320 16:18:02.349010 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" podStartSLOduration=1.390726248 podStartE2EDuration="2.348994888s" podCreationTimestamp="2026-03-20 16:18:00 +0000 UTC" firstStartedPulling="2026-03-20 16:18:00.93853129 +0000 UTC m=+2410.361234131" lastFinishedPulling="2026-03-20 16:18:01.89679993 +0000 UTC m=+2411.319502771" observedRunningTime="2026-03-20 16:18:02.34313661 +0000 UTC m=+2411.765839471" watchObservedRunningTime="2026-03-20 16:18:02.348994888 +0000 UTC m=+2411.771697729" Mar 20 16:18:02 crc kubenswrapper[4813]: I0320 16:18:02.359894 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:02 crc kubenswrapper[4813]: I0320 16:18:02.363754 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.332053 4813 generic.go:334] "Generic (PLEG): 
container finished" podID="8b36cb93-16d8-40e3-bff6-45a70579002e" containerID="f6696a828ef2dc7ee0b5ea8697e1eed6d4d52c01afa0ad0ec2b23b1c7dc77382" exitCode=0 Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.332166 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" event={"ID":"8b36cb93-16d8-40e3-bff6-45a70579002e","Type":"ContainerDied","Data":"f6696a828ef2dc7ee0b5ea8697e1eed6d4d52c01afa0ad0ec2b23b1c7dc77382"} Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.929839 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.930387 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="sg-core" containerID="cri-o://c3ecf7c79f34b497b4fae7ea66fe7c381b82d74811b1f5c93a737233f17a2e12" gracePeriod=30 Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.930466 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="proxy-httpd" containerID="cri-o://be153c0d60bc666aba884582a3c4ef6446568e61098e5d4197a4bfff80e24a64" gracePeriod=30 Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.930339 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-central-agent" containerID="cri-o://16f85b099c64bd806975d1ed66076ec2817d286749eaa5e0a12d43d5ac321273" gracePeriod=30 Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.930649 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-notification-agent" containerID="cri-o://8bdc6016a55d55ea5f6f209fc1c2d578a2b162f8155afd0cd188348715ca00d6" gracePeriod=30 Mar 20 16:18:03 crc kubenswrapper[4813]: I0320 16:18:03.956141 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.344129 4813 generic.go:334] "Generic (PLEG): container finished" podID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerID="be153c0d60bc666aba884582a3c4ef6446568e61098e5d4197a4bfff80e24a64" exitCode=0 Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.344159 4813 generic.go:334] "Generic (PLEG): container finished" podID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerID="c3ecf7c79f34b497b4fae7ea66fe7c381b82d74811b1f5c93a737233f17a2e12" exitCode=2 Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.346152 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerDied","Data":"be153c0d60bc666aba884582a3c4ef6446568e61098e5d4197a4bfff80e24a64"} Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.346288 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerDied","Data":"c3ecf7c79f34b497b4fae7ea66fe7c381b82d74811b1f5c93a737233f17a2e12"} Mar 20 16:18:04 crc 
kubenswrapper[4813]: I0320 16:18:04.394007 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.409398 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-vp6nh"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.428598 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watchera660-account-delete-pww8c"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.430514 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.482667 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watchera660-account-delete-pww8c"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.488117 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ef49c-d75c-43cf-a61e-8748ccb926a2-operator-scripts\") pod \"watchera660-account-delete-pww8c\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.488174 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhxlm\" (UniqueName: \"kubernetes.io/projected/596ef49c-d75c-43cf-a61e-8748ccb926a2-kube-api-access-fhxlm\") pod \"watchera660-account-delete-pww8c\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: E0320 16:18:04.488767 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:04 crc kubenswrapper[4813]: E0320 16:18:04.488818 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data podName:b5b66c98-4310-4442-a9a6-25a828d1c1cc nodeName:}" failed. No retries permitted until 2026-03-20 16:18:04.988801762 +0000 UTC m=+2414.411504603 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.527222 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.556555 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.557008 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" containerName="watcher-applier" containerID="cri-o://fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67" gracePeriod=30 Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.589911 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ef49c-d75c-43cf-a61e-8748ccb926a2-operator-scripts\") pod \"watchera660-account-delete-pww8c\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.589971 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhxlm\" (UniqueName: \"kubernetes.io/projected/596ef49c-d75c-43cf-a61e-8748ccb926a2-kube-api-access-fhxlm\") pod \"watchera660-account-delete-pww8c\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.590800 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ef49c-d75c-43cf-a61e-8748ccb926a2-operator-scripts\") pod \"watchera660-account-delete-pww8c\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.666306 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhxlm\" (UniqueName: \"kubernetes.io/projected/596ef49c-d75c-43cf-a61e-8748ccb926a2-kube-api-access-fhxlm\") pod \"watchera660-account-delete-pww8c\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.669760 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.670240 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-kuttl-api-log" containerID="cri-o://3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda" gracePeriod=30 Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.670821 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-api" containerID="cri-o://80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22" 
gracePeriod=30 Mar 20 16:18:04 crc kubenswrapper[4813]: I0320 16:18:04.837593 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:04 crc kubenswrapper[4813]: E0320 16:18:04.998719 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:04 crc kubenswrapper[4813]: E0320 16:18:04.998799 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data podName:b5b66c98-4310-4442-a9a6-25a828d1c1cc nodeName:}" failed. No retries permitted until 2026-03-20 16:18:05.99877983 +0000 UTC m=+2415.421482671 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.003705 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.101847 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs64q\" (UniqueName: \"kubernetes.io/projected/8b36cb93-16d8-40e3-bff6-45a70579002e-kube-api-access-cs64q\") pod \"8b36cb93-16d8-40e3-bff6-45a70579002e\" (UID: \"8b36cb93-16d8-40e3-bff6-45a70579002e\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.115721 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b36cb93-16d8-40e3-bff6-45a70579002e-kube-api-access-cs64q" (OuterVolumeSpecName: "kube-api-access-cs64q") pod "8b36cb93-16d8-40e3-bff6-45a70579002e" (UID: "8b36cb93-16d8-40e3-bff6-45a70579002e"). InnerVolumeSpecName "kube-api-access-cs64q". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.203585 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs64q\" (UniqueName: \"kubernetes.io/projected/8b36cb93-16d8-40e3-bff6-45a70579002e-kube-api-access-cs64q\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.274862 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8432d14-de6c-4396-8854-8411787cbfdf" path="/var/lib/kubelet/pods/a8432d14-de6c-4396-8854-8411787cbfdf/volumes" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.366401 4813 generic.go:334] "Generic (PLEG): container finished" podID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerID="3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda" exitCode=143 Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.367747 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"29b5cee9-5160-4f31-a9ab-733e1ef673ba","Type":"ContainerDied","Data":"3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda"} Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.379231 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.379879 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567058-nsmxd" event={"ID":"8b36cb93-16d8-40e3-bff6-45a70579002e","Type":"ContainerDied","Data":"a9701cc6e43a8a3e7871e52884bfee5addfd75546e051fd62eac27e95a0bce36"} Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.379906 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9701cc6e43a8a3e7871e52884bfee5addfd75546e051fd62eac27e95a0bce36" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.386694 4813 generic.go:334] "Generic (PLEG): container finished" podID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerID="8bdc6016a55d55ea5f6f209fc1c2d578a2b162f8155afd0cd188348715ca00d6" exitCode=0 Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.386722 4813 generic.go:334] "Generic (PLEG): container finished" podID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerID="16f85b099c64bd806975d1ed66076ec2817d286749eaa5e0a12d43d5ac321273" exitCode=0 Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.386864 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="b5b66c98-4310-4442-a9a6-25a828d1c1cc" containerName="watcher-decision-engine" containerID="cri-o://7c008e4b8cde5be1afc5c20235c3cb8c2a460538f7cbebdbbbaeb7e54c2763e8" gracePeriod=30 Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.387149 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerDied","Data":"8bdc6016a55d55ea5f6f209fc1c2d578a2b162f8155afd0cd188348715ca00d6"} Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.387174 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerDied","Data":"16f85b099c64bd806975d1ed66076ec2817d286749eaa5e0a12d43d5ac321273"} Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.423292 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567052-2pwgn"] Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.432386 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567052-2pwgn"] Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.453425 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.565701 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watchera660-account-delete-pww8c"] Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.610858 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-run-httpd\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611335 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-log-httpd\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611461 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2qpk\" (UniqueName: \"kubernetes.io/projected/ecc73847-6525-4f9a-8323-334a1b7eec64-kube-api-access-x2qpk\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611573 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-sg-core-conf-yaml\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611690 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-ceilometer-tls-certs\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611782 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-config-data\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611868 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-scripts\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.611987 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-combined-ca-bundle\") pod \"ecc73847-6525-4f9a-8323-334a1b7eec64\" (UID: \"ecc73847-6525-4f9a-8323-334a1b7eec64\") " Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.614062 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.614731 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.629950 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-scripts" (OuterVolumeSpecName: "scripts") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.637099 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.637720 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc73847-6525-4f9a-8323-334a1b7eec64-kube-api-access-x2qpk" (OuterVolumeSpecName: "kube-api-access-x2qpk") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "kube-api-access-x2qpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.689724 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.701318 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713840 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2qpk\" (UniqueName: \"kubernetes.io/projected/ecc73847-6525-4f9a-8323-334a1b7eec64-kube-api-access-x2qpk\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713878 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713891 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713902 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713915 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713926 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.713938 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecc73847-6525-4f9a-8323-334a1b7eec64-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.725253 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-config-data" (OuterVolumeSpecName: "config-data") pod "ecc73847-6525-4f9a-8323-334a1b7eec64" (UID: "ecc73847-6525-4f9a-8323-334a1b7eec64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:05 crc kubenswrapper[4813]: I0320 16:18:05.815690 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc73847-6525-4f9a-8323-334a1b7eec64-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.029520 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.029837 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data podName:b5b66c98-4310-4442-a9a6-25a828d1c1cc nodeName:}" failed. No retries permitted until 2026-03-20 16:18:08.029822543 +0000 UTC m=+2417.452525384 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.271257 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.389231 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.390397 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.391800 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.391844 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" containerName="watcher-applier" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.396313 4813 generic.go:334] "Generic (PLEG): container finished" podID="596ef49c-d75c-43cf-a61e-8748ccb926a2" containerID="b2a85925b98f2125ff57ed0715a506cd6ebf040b930251a7229e326ab6c5c65f" exitCode=0 Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.396376 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" event={"ID":"596ef49c-d75c-43cf-a61e-8748ccb926a2","Type":"ContainerDied","Data":"b2a85925b98f2125ff57ed0715a506cd6ebf040b930251a7229e326ab6c5c65f"} Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.396399 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" event={"ID":"596ef49c-d75c-43cf-a61e-8748ccb926a2","Type":"ContainerStarted","Data":"78191291aa93d814a8f8d23a6dc1039444744ea769afecb8d4dacc0065986318"} Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.401007 4813 generic.go:334] "Generic (PLEG): container finished" podID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerID="80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22" exitCode=0 Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.401101 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.401329 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"29b5cee9-5160-4f31-a9ab-733e1ef673ba","Type":"ContainerDied","Data":"80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22"} Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.401439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"29b5cee9-5160-4f31-a9ab-733e1ef673ba","Type":"ContainerDied","Data":"1c082bbdb1a379e2b199598c0b3c29753b4c95e70d47d0b4697bf05fbdf49797"} Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.401461 4813 scope.go:117] "RemoveContainer" containerID="80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.404533 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"ecc73847-6525-4f9a-8323-334a1b7eec64","Type":"ContainerDied","Data":"ed2cfb6307e9400e472bb86fad908b038b284e6c9fbd17215034982da61fe4b3"} Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.404760 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.423656 4813 scope.go:117] "RemoveContainer" containerID="3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.435565 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-custom-prometheus-ca\") pod \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.436153 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29b5cee9-5160-4f31-a9ab-733e1ef673ba-logs\") pod \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.436275 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-combined-ca-bundle\") pod \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.436374 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnz8h\" (UniqueName: \"kubernetes.io/projected/29b5cee9-5160-4f31-a9ab-733e1ef673ba-kube-api-access-lnz8h\") pod \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.436420 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-config-data\") pod \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\" (UID: \"29b5cee9-5160-4f31-a9ab-733e1ef673ba\") " Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.436777 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29b5cee9-5160-4f31-a9ab-733e1ef673ba-logs" (OuterVolumeSpecName: 
"logs") pod "29b5cee9-5160-4f31-a9ab-733e1ef673ba" (UID: "29b5cee9-5160-4f31-a9ab-733e1ef673ba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.459693 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29b5cee9-5160-4f31-a9ab-733e1ef673ba-kube-api-access-lnz8h" (OuterVolumeSpecName: "kube-api-access-lnz8h") pod "29b5cee9-5160-4f31-a9ab-733e1ef673ba" (UID: "29b5cee9-5160-4f31-a9ab-733e1ef673ba"). InnerVolumeSpecName "kube-api-access-lnz8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.464265 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.470721 4813 scope.go:117] "RemoveContainer" containerID="80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.471215 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22\": container with ID starting with 80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22 not found: ID does not exist" containerID="80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.471278 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22"} err="failed to get container status \"80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22\": rpc error: code = NotFound desc = could not find container \"80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22\": container with ID starting with 80d655a7acf3f53da80000d421a0dfe2fd4fcb0dd3202de111424abc0bcdaa22 not found: ID does not exist" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.471303 4813 scope.go:117] "RemoveContainer" containerID="3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.473131 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda\": container with ID starting with 3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda not found: ID does not exist" containerID="3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.473173 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda"} err="failed to get container status \"3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda\": rpc error: code = NotFound desc = could not find container \"3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda\": container with ID starting with 3d8080b80c7df77039ef5890eaa6c94ba261446491a4bbe2a47eadefa3f1ecda not found: ID does not exist" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.473199 4813 scope.go:117] "RemoveContainer" containerID="be153c0d60bc666aba884582a3c4ef6446568e61098e5d4197a4bfff80e24a64" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.485283 4813 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.492796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29b5cee9-5160-4f31-a9ab-733e1ef673ba" (UID: "29b5cee9-5160-4f31-a9ab-733e1ef673ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.493558 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.493898 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b36cb93-16d8-40e3-bff6-45a70579002e" containerName="oc" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.493914 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b36cb93-16d8-40e3-bff6-45a70579002e" containerName="oc" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.493926 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="sg-core" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.493933 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="sg-core" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.493948 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="proxy-httpd" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.493954 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="proxy-httpd" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.493974 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-central-agent" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.493980 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-central-agent" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.493991 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-kuttl-api-log" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.493997 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-kuttl-api-log" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.494011 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-api" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494017 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-api" Mar 20 16:18:06 crc kubenswrapper[4813]: E0320 16:18:06.494027 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-notification-agent" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494033 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-notification-agent" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494238 4813 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-api" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494259 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="proxy-httpd" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494271 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" containerName="watcher-kuttl-api-log" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494284 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="sg-core" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494298 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-notification-agent" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494307 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b36cb93-16d8-40e3-bff6-45a70579002e" containerName="oc" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.494318 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" containerName="ceilometer-central-agent" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.495010 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "29b5cee9-5160-4f31-a9ab-733e1ef673ba" (UID: "29b5cee9-5160-4f31-a9ab-733e1ef673ba"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.495711 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.498724 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.498952 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.499610 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.536556 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.541339 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.541364 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnz8h\" (UniqueName: \"kubernetes.io/projected/29b5cee9-5160-4f31-a9ab-733e1ef673ba-kube-api-access-lnz8h\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.541373 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.541381 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29b5cee9-5160-4f31-a9ab-733e1ef673ba-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.543645 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-config-data" (OuterVolumeSpecName: "config-data") pod "29b5cee9-5160-4f31-a9ab-733e1ef673ba" (UID: "29b5cee9-5160-4f31-a9ab-733e1ef673ba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.547464 4813 scope.go:117] "RemoveContainer" containerID="c3ecf7c79f34b497b4fae7ea66fe7c381b82d74811b1f5c93a737233f17a2e12" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.572113 4813 scope.go:117] "RemoveContainer" containerID="8bdc6016a55d55ea5f6f209fc1c2d578a2b162f8155afd0cd188348715ca00d6" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.593472 4813 scope.go:117] "RemoveContainer" containerID="16f85b099c64bd806975d1ed66076ec2817d286749eaa5e0a12d43d5ac321273" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642389 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-config-data\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642449 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-scripts\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642494 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-log-httpd\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642586 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-run-httpd\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642671 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642724 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.642749 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfb9f\" (UniqueName: 
\"kubernetes.io/projected/7147803c-236b-44ef-a145-b050941f8988-kube-api-access-lfb9f\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.643207 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b5cee9-5160-4f31-a9ab-733e1ef673ba-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.734498 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.741860 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.744908 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-scripts\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.744959 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-log-httpd\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745020 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745061 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-run-httpd\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745094 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745136 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfb9f\" (UniqueName: \"kubernetes.io/projected/7147803c-236b-44ef-a145-b050941f8988-kube-api-access-lfb9f\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745157 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745262 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-config-data\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745598 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-log-httpd\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.745921 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-run-httpd\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.749303 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.749350 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-scripts\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.750089 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.750705 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-config-data\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.750955 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.761422 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfb9f\" (UniqueName: \"kubernetes.io/projected/7147803c-236b-44ef-a145-b050941f8988-kube-api-access-lfb9f\") pod \"ceilometer-0\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:06 crc kubenswrapper[4813]: I0320 16:18:06.857807 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.275961 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29b5cee9-5160-4f31-a9ab-733e1ef673ba" path="/var/lib/kubelet/pods/29b5cee9-5160-4f31-a9ab-733e1ef673ba/volumes" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.277165 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c3ffb29-7274-4ce3-be5a-f5819f5ac12f" path="/var/lib/kubelet/pods/6c3ffb29-7274-4ce3-be5a-f5819f5ac12f/volumes" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.277977 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc73847-6525-4f9a-8323-334a1b7eec64" path="/var/lib/kubelet/pods/ecc73847-6525-4f9a-8323-334a1b7eec64/volumes" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.317238 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.368365 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:07 crc kubenswrapper[4813]: W0320 16:18:07.386142 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7147803c_236b_44ef_a145_b050941f8988.slice/crio-3b3efdbc61d04d2c9922279701b458c58284bc9071ec1ee614441c3c8cfd9ded WatchSource:0}: Error finding container 3b3efdbc61d04d2c9922279701b458c58284bc9071ec1ee614441c3c8cfd9ded: Status 404 returned error can't find the container with id 3b3efdbc61d04d2c9922279701b458c58284bc9071ec1ee614441c3c8cfd9ded Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.411617 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerStarted","Data":"3b3efdbc61d04d2c9922279701b458c58284bc9071ec1ee614441c3c8cfd9ded"} Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.739414 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.862922 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhxlm\" (UniqueName: \"kubernetes.io/projected/596ef49c-d75c-43cf-a61e-8748ccb926a2-kube-api-access-fhxlm\") pod \"596ef49c-d75c-43cf-a61e-8748ccb926a2\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.863132 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ef49c-d75c-43cf-a61e-8748ccb926a2-operator-scripts\") pod \"596ef49c-d75c-43cf-a61e-8748ccb926a2\" (UID: \"596ef49c-d75c-43cf-a61e-8748ccb926a2\") " Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.863774 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/596ef49c-d75c-43cf-a61e-8748ccb926a2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "596ef49c-d75c-43cf-a61e-8748ccb926a2" (UID: "596ef49c-d75c-43cf-a61e-8748ccb926a2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.875171 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596ef49c-d75c-43cf-a61e-8748ccb926a2-kube-api-access-fhxlm" (OuterVolumeSpecName: "kube-api-access-fhxlm") pod "596ef49c-d75c-43cf-a61e-8748ccb926a2" (UID: "596ef49c-d75c-43cf-a61e-8748ccb926a2"). InnerVolumeSpecName "kube-api-access-fhxlm". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.965146 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhxlm\" (UniqueName: \"kubernetes.io/projected/596ef49c-d75c-43cf-a61e-8748ccb926a2-kube-api-access-fhxlm\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:07 crc kubenswrapper[4813]: I0320 16:18:07.965500 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ef49c-d75c-43cf-a61e-8748ccb926a2-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:08 crc kubenswrapper[4813]: E0320 16:18:08.067525 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:08 crc kubenswrapper[4813]: E0320 16:18:08.067591 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data podName:b5b66c98-4310-4442-a9a6-25a828d1c1cc nodeName:}" failed. No retries permitted until 2026-03-20 16:18:12.067575993 +0000 UTC m=+2421.490278834 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.421733 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerStarted","Data":"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba"} Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.424108 4813 generic.go:334] "Generic (PLEG): container finished" podID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" containerID="fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67" exitCode=0 Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.424185 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"71dbbcff-6731-4ec5-8f1a-7233a3d105bc","Type":"ContainerDied","Data":"fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67"} Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.426303 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" event={"ID":"596ef49c-d75c-43cf-a61e-8748ccb926a2","Type":"ContainerDied","Data":"78191291aa93d814a8f8d23a6dc1039444744ea769afecb8d4dacc0065986318"} Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.426331 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78191291aa93d814a8f8d23a6dc1039444744ea769afecb8d4dacc0065986318" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.426399 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watchera660-account-delete-pww8c" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.540045 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.675695 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lkps\" (UniqueName: \"kubernetes.io/projected/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-kube-api-access-9lkps\") pod \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.675760 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-config-data\") pod \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.675871 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-combined-ca-bundle\") pod \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.675920 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-logs\") pod \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\" (UID: \"71dbbcff-6731-4ec5-8f1a-7233a3d105bc\") " Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.676804 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-logs" (OuterVolumeSpecName: "logs") pod "71dbbcff-6731-4ec5-8f1a-7233a3d105bc" (UID: "71dbbcff-6731-4ec5-8f1a-7233a3d105bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.682108 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-kube-api-access-9lkps" (OuterVolumeSpecName: "kube-api-access-9lkps") pod "71dbbcff-6731-4ec5-8f1a-7233a3d105bc" (UID: "71dbbcff-6731-4ec5-8f1a-7233a3d105bc"). InnerVolumeSpecName "kube-api-access-9lkps". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.697697 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.697738 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lkps\" (UniqueName: \"kubernetes.io/projected/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-kube-api-access-9lkps\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.705633 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71dbbcff-6731-4ec5-8f1a-7233a3d105bc" (UID: "71dbbcff-6731-4ec5-8f1a-7233a3d105bc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.739655 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-config-data" (OuterVolumeSpecName: "config-data") pod "71dbbcff-6731-4ec5-8f1a-7233a3d105bc" (UID: "71dbbcff-6731-4ec5-8f1a-7233a3d105bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.809301 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:08 crc kubenswrapper[4813]: I0320 16:18:08.809346 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71dbbcff-6731-4ec5-8f1a-7233a3d105bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.464279 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qnpth"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.469308 4813 generic.go:334] "Generic (PLEG): container finished" podID="b5b66c98-4310-4442-a9a6-25a828d1c1cc" containerID="7c008e4b8cde5be1afc5c20235c3cb8c2a460538f7cbebdbbbaeb7e54c2763e8" exitCode=0 Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.469395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"b5b66c98-4310-4442-a9a6-25a828d1c1cc","Type":"ContainerDied","Data":"7c008e4b8cde5be1afc5c20235c3cb8c2a460538f7cbebdbbbaeb7e54c2763e8"} Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.470374 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qnpth"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.477239 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-a660-account-create-update-9vc6b"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.482938 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watchera660-account-delete-pww8c"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.483221 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"71dbbcff-6731-4ec5-8f1a-7233a3d105bc","Type":"ContainerDied","Data":"9605d374c89b47be11501a9e9d5993b36056b63278b5506c506aa0ea20320df1"} Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.483255 4813 scope.go:117] "RemoveContainer" containerID="fac4cc40a7cc20a3d107875e0f937fe2a48c9f12720dcf37baec7392ff927c67" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.483378 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.490733 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-a660-account-create-update-9vc6b"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.496632 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watchera660-account-delete-pww8c"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.503872 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.511629 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.635995 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.824183 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-custom-prometheus-ca\") pod \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.824247 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-combined-ca-bundle\") pod \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.824798 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data\") pod \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.824876 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b66c98-4310-4442-a9a6-25a828d1c1cc-logs\") pod \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.824956 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr7kp\" (UniqueName: \"kubernetes.io/projected/b5b66c98-4310-4442-a9a6-25a828d1c1cc-kube-api-access-cr7kp\") pod \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\" (UID: \"b5b66c98-4310-4442-a9a6-25a828d1c1cc\") " Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.829895 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5b66c98-4310-4442-a9a6-25a828d1c1cc-logs" (OuterVolumeSpecName: "logs") pod "b5b66c98-4310-4442-a9a6-25a828d1c1cc" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.841941 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5b66c98-4310-4442-a9a6-25a828d1c1cc-kube-api-access-cr7kp" (OuterVolumeSpecName: "kube-api-access-cr7kp") pod "b5b66c98-4310-4442-a9a6-25a828d1c1cc" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc"). 
InnerVolumeSpecName "kube-api-access-cr7kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.875595 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5b66c98-4310-4442-a9a6-25a828d1c1cc" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.894874 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "b5b66c98-4310-4442-a9a6-25a828d1c1cc" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.927399 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b66c98-4310-4442-a9a6-25a828d1c1cc-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.927446 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr7kp\" (UniqueName: \"kubernetes.io/projected/b5b66c98-4310-4442-a9a6-25a828d1c1cc-kube-api-access-cr7kp\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.927455 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.927464 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:09 crc kubenswrapper[4813]: I0320 16:18:09.927931 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data" (OuterVolumeSpecName: "config-data") pod "b5b66c98-4310-4442-a9a6-25a828d1c1cc" (UID: "b5b66c98-4310-4442-a9a6-25a828d1c1cc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.028569 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b66c98-4310-4442-a9a6-25a828d1c1cc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.496363 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerStarted","Data":"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411"} Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.496438 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerStarted","Data":"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed"} Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.497937 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"b5b66c98-4310-4442-a9a6-25a828d1c1cc","Type":"ContainerDied","Data":"de7e473d4bc02abb27368ae0540f5690f237483d9c50aa87b117de1664330642"} Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.498001 4813 scope.go:117] "RemoveContainer" containerID="7c008e4b8cde5be1afc5c20235c3cb8c2a460538f7cbebdbbbaeb7e54c2763e8" Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.498034 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.542294 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:10 crc kubenswrapper[4813]: I0320 16:18:10.550033 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.277574 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04bba761-d948-40dd-8c46-b1b8327d7fb4" path="/var/lib/kubelet/pods/04bba761-d948-40dd-8c46-b1b8327d7fb4/volumes" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.278467 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="463e1938-e4b8-4ac9-b3cb-f149330a1efa" path="/var/lib/kubelet/pods/463e1938-e4b8-4ac9-b3cb-f149330a1efa/volumes" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.278986 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596ef49c-d75c-43cf-a61e-8748ccb926a2" path="/var/lib/kubelet/pods/596ef49c-d75c-43cf-a61e-8748ccb926a2/volumes" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.281230 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" path="/var/lib/kubelet/pods/71dbbcff-6731-4ec5-8f1a-7233a3d105bc/volumes" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.281963 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5b66c98-4310-4442-a9a6-25a828d1c1cc" path="/var/lib/kubelet/pods/b5b66c98-4310-4442-a9a6-25a828d1c1cc/volumes" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.639499 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-4z67w"] Mar 20 16:18:11 crc kubenswrapper[4813]: E0320 16:18:11.640005 4813 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="596ef49c-d75c-43cf-a61e-8748ccb926a2" containerName="mariadb-account-delete" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.640017 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="596ef49c-d75c-43cf-a61e-8748ccb926a2" containerName="mariadb-account-delete" Mar 20 16:18:11 crc kubenswrapper[4813]: E0320 16:18:11.640051 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5b66c98-4310-4442-a9a6-25a828d1c1cc" containerName="watcher-decision-engine" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.640057 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5b66c98-4310-4442-a9a6-25a828d1c1cc" containerName="watcher-decision-engine" Mar 20 16:18:11 crc kubenswrapper[4813]: E0320 16:18:11.640065 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" containerName="watcher-applier" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.640071 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" containerName="watcher-applier" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.640206 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5b66c98-4310-4442-a9a6-25a828d1c1cc" containerName="watcher-decision-engine" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.640226 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="596ef49c-d75c-43cf-a61e-8748ccb926a2" containerName="mariadb-account-delete" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.640236 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="71dbbcff-6731-4ec5-8f1a-7233a3d105bc" containerName="watcher-applier" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.643663 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.681262 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-4z67w"] Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.698091 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-7235-account-create-update-h27lb"] Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.719014 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.723021 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.744692 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-7235-account-create-update-h27lb"] Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.771973 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-operator-scripts\") pod \"watcher-7235-account-create-update-h27lb\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.772051 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/585ae5c1-6601-4d39-a127-9a5e1abf1c54-operator-scripts\") pod \"watcher-db-create-4z67w\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.772111 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jx7c\" (UniqueName: \"kubernetes.io/projected/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-kube-api-access-7jx7c\") pod \"watcher-7235-account-create-update-h27lb\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.772256 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwbrc\" (UniqueName: \"kubernetes.io/projected/585ae5c1-6601-4d39-a127-9a5e1abf1c54-kube-api-access-vwbrc\") pod \"watcher-db-create-4z67w\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.875029 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jx7c\" (UniqueName: \"kubernetes.io/projected/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-kube-api-access-7jx7c\") pod \"watcher-7235-account-create-update-h27lb\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.875147 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwbrc\" (UniqueName: \"kubernetes.io/projected/585ae5c1-6601-4d39-a127-9a5e1abf1c54-kube-api-access-vwbrc\") pod \"watcher-db-create-4z67w\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.875208 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-operator-scripts\") pod \"watcher-7235-account-create-update-h27lb\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.875246 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/585ae5c1-6601-4d39-a127-9a5e1abf1c54-operator-scripts\") pod \"watcher-db-create-4z67w\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.876170 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/585ae5c1-6601-4d39-a127-9a5e1abf1c54-operator-scripts\") pod \"watcher-db-create-4z67w\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.878459 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-operator-scripts\") pod \"watcher-7235-account-create-update-h27lb\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.898010 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jx7c\" (UniqueName: \"kubernetes.io/projected/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-kube-api-access-7jx7c\") pod \"watcher-7235-account-create-update-h27lb\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:11 crc kubenswrapper[4813]: I0320 16:18:11.901976 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwbrc\" (UniqueName: \"kubernetes.io/projected/585ae5c1-6601-4d39-a127-9a5e1abf1c54-kube-api-access-vwbrc\") pod \"watcher-db-create-4z67w\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.065111 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.091748 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.538266 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-4z67w"] Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.548284 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerStarted","Data":"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b"} Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.548528 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-central-agent" containerID="cri-o://9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" gracePeriod=30 Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.549171 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.551941 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-notification-agent" containerID="cri-o://6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" gracePeriod=30 Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.552013 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="proxy-httpd" containerID="cri-o://54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" gracePeriod=30 Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.552172 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="sg-core" containerID="cri-o://e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" gracePeriod=30 Mar 20 16:18:12 crc kubenswrapper[4813]: W0320 16:18:12.562906 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod585ae5c1_6601_4d39_a127_9a5e1abf1c54.slice/crio-2e5da060dd52ed2e56244e084951d1b7597579c046530904c3a20d2f5d4f13e6 WatchSource:0}: Error finding container 2e5da060dd52ed2e56244e084951d1b7597579c046530904c3a20d2f5d4f13e6: Status 404 returned error can't find the container with id 2e5da060dd52ed2e56244e084951d1b7597579c046530904c3a20d2f5d4f13e6 Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.595894 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.404970066 podStartE2EDuration="6.595871058s" podCreationTimestamp="2026-03-20 16:18:06 +0000 UTC" firstStartedPulling="2026-03-20 16:18:07.3883019 +0000 UTC m=+2416.811004741" lastFinishedPulling="2026-03-20 16:18:11.579202892 +0000 UTC m=+2421.001905733" observedRunningTime="2026-03-20 16:18:12.583179566 +0000 UTC m=+2422.005882397" watchObservedRunningTime="2026-03-20 16:18:12.595871058 +0000 UTC m=+2422.018573899" Mar 20 16:18:12 crc kubenswrapper[4813]: I0320 16:18:12.651597 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-7235-account-create-update-h27lb"] Mar 20 
16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.355936 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.403929 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-sg-core-conf-yaml\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.403989 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-config-data\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404025 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-scripts\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404075 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-combined-ca-bundle\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404102 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-run-httpd\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404126 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-log-httpd\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404147 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-ceilometer-tls-certs\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404167 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfb9f\" (UniqueName: \"kubernetes.io/projected/7147803c-236b-44ef-a145-b050941f8988-kube-api-access-lfb9f\") pod \"7147803c-236b-44ef-a145-b050941f8988\" (UID: \"7147803c-236b-44ef-a145-b050941f8988\") " Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.404933 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.405026 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.410443 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7147803c-236b-44ef-a145-b050941f8988-kube-api-access-lfb9f" (OuterVolumeSpecName: "kube-api-access-lfb9f") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "kube-api-access-lfb9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.410444 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-scripts" (OuterVolumeSpecName: "scripts") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.431327 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.445721 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.461305 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.474470 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-config-data" (OuterVolumeSpecName: "config-data") pod "7147803c-236b-44ef-a145-b050941f8988" (UID: "7147803c-236b-44ef-a145-b050941f8988"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505779 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505818 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505830 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505844 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505857 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505869 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7147803c-236b-44ef-a145-b050941f8988-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505881 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7147803c-236b-44ef-a145-b050941f8988-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.505893 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfb9f\" (UniqueName: \"kubernetes.io/projected/7147803c-236b-44ef-a145-b050941f8988-kube-api-access-lfb9f\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562234 4813 generic.go:334] "Generic (PLEG): container finished" podID="7147803c-236b-44ef-a145-b050941f8988" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" exitCode=0 Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562266 4813 generic.go:334] "Generic (PLEG): container finished" podID="7147803c-236b-44ef-a145-b050941f8988" containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" exitCode=2 Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562273 4813 generic.go:334] "Generic (PLEG): container finished" podID="7147803c-236b-44ef-a145-b050941f8988" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" exitCode=0 Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562286 4813 generic.go:334] "Generic (PLEG): container finished" podID="7147803c-236b-44ef-a145-b050941f8988" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" exitCode=0 Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562324 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerDied","Data":"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b"} Mar 20 16:18:13 crc kubenswrapper[4813]: 
I0320 16:18:13.562350 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerDied","Data":"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562360 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerDied","Data":"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562369 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerDied","Data":"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562378 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7147803c-236b-44ef-a145-b050941f8988","Type":"ContainerDied","Data":"3b3efdbc61d04d2c9922279701b458c58284bc9071ec1ee614441c3c8cfd9ded"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562393 4813 scope.go:117] "RemoveContainer" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.562562 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.565595 4813 generic.go:334] "Generic (PLEG): container finished" podID="5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" containerID="778a3f275b2bfba5a450191f7949472cee90c5864640a023ec9066846956ccc9" exitCode=0 Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.565656 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" event={"ID":"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca","Type":"ContainerDied","Data":"778a3f275b2bfba5a450191f7949472cee90c5864640a023ec9066846956ccc9"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.565675 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" event={"ID":"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca","Type":"ContainerStarted","Data":"f2d61fd5c2a1e51eb511e55013626dd5eed2163edc7ec46df49e6743db5e056e"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.567937 4813 generic.go:334] "Generic (PLEG): container finished" podID="585ae5c1-6601-4d39-a127-9a5e1abf1c54" containerID="0636ab555da1451d7cf1a4bc9bd1dad4be06b0d652f040639143e6d838022e33" exitCode=0 Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.567985 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-4z67w" event={"ID":"585ae5c1-6601-4d39-a127-9a5e1abf1c54","Type":"ContainerDied","Data":"0636ab555da1451d7cf1a4bc9bd1dad4be06b0d652f040639143e6d838022e33"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.568013 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-4z67w" event={"ID":"585ae5c1-6601-4d39-a127-9a5e1abf1c54","Type":"ContainerStarted","Data":"2e5da060dd52ed2e56244e084951d1b7597579c046530904c3a20d2f5d4f13e6"} Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.583933 4813 scope.go:117] "RemoveContainer" 
containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.603143 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.614267 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.637622 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.638154 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-central-agent" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638169 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-central-agent" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.638180 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-notification-agent" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638185 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-notification-agent" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.638192 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="proxy-httpd" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638199 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="proxy-httpd" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.638230 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="sg-core" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638235 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="sg-core" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638368 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-central-agent" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638379 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="proxy-httpd" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638386 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="ceilometer-notification-agent" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.638394 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7147803c-236b-44ef-a145-b050941f8988" containerName="sg-core" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.639902 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.648152 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.648895 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.649022 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.654575 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.682186 4813 scope.go:117] "RemoveContainer" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.698726 4813 scope.go:117] "RemoveContainer" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.713602 4813 scope.go:117] "RemoveContainer" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.714007 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": container with ID starting with 54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b not found: ID does not exist" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.714049 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b"} err="failed to get container status \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": rpc error: code = NotFound desc = could not find container \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": container with ID starting with 54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.714075 4813 scope.go:117] "RemoveContainer" containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.714343 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": container with ID starting with e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411 not found: ID does not exist" containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.714385 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411"} err="failed to get container status \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": rpc error: code = NotFound desc = could not find container \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": container with ID starting with 
e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411 not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.714411 4813 scope.go:117] "RemoveContainer" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.714846 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": container with ID starting with 6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed not found: ID does not exist" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.714866 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed"} err="failed to get container status \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": rpc error: code = NotFound desc = could not find container \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": container with ID starting with 6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.714881 4813 scope.go:117] "RemoveContainer" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" Mar 20 16:18:13 crc kubenswrapper[4813]: E0320 16:18:13.715123 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": container with ID starting with 9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba not found: ID does not exist" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.715152 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba"} err="failed to get container status \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": rpc error: code = NotFound desc = could not find container \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": container with ID starting with 9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.715170 4813 scope.go:117] "RemoveContainer" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.715475 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b"} err="failed to get container status \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": rpc error: code = NotFound desc = could not find container \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": container with ID starting with 54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.715518 4813 scope.go:117] "RemoveContainer" containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" Mar 20 16:18:13 crc 
kubenswrapper[4813]: I0320 16:18:13.715908 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411"} err="failed to get container status \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": rpc error: code = NotFound desc = could not find container \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": container with ID starting with e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411 not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.715932 4813 scope.go:117] "RemoveContainer" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.716217 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed"} err="failed to get container status \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": rpc error: code = NotFound desc = could not find container \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": container with ID starting with 6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.716240 4813 scope.go:117] "RemoveContainer" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.716454 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba"} err="failed to get container status \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": rpc error: code = NotFound desc = could not find container \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": container with ID starting with 9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.716478 4813 scope.go:117] "RemoveContainer" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.716814 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b"} err="failed to get container status \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": rpc error: code = NotFound desc = could not find container \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": container with ID starting with 54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.716835 4813 scope.go:117] "RemoveContainer" containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.717093 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411"} err="failed to get container status \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": rpc error: code = NotFound desc = could not find container \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": container with ID 
starting with e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411 not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.717118 4813 scope.go:117] "RemoveContainer" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.717325 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed"} err="failed to get container status \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": rpc error: code = NotFound desc = could not find container \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": container with ID starting with 6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.717351 4813 scope.go:117] "RemoveContainer" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.717764 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba"} err="failed to get container status \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": rpc error: code = NotFound desc = could not find container \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": container with ID starting with 9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.717943 4813 scope.go:117] "RemoveContainer" containerID="54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.718300 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b"} err="failed to get container status \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": rpc error: code = NotFound desc = could not find container \"54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b\": container with ID starting with 54af87fb417b31c2072c7e71bad0d0458ac532c352955e22107dca262b0afc7b not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.718322 4813 scope.go:117] "RemoveContainer" containerID="e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.718659 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411"} err="failed to get container status \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": rpc error: code = NotFound desc = could not find container \"e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411\": container with ID starting with e7cd98092a6a7983bf7245b83e673252e66e3780f5cde5763fece32f7c093411 not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.718750 4813 scope.go:117] "RemoveContainer" containerID="6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.719118 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed"} err="failed to get container status \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": rpc error: code = NotFound desc = could not find container \"6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed\": container with ID starting with 6a3447cff34d9f68ef190fcf2051a86abf811b251db53a6946e686effd1526ed not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.719144 4813 scope.go:117] "RemoveContainer" containerID="9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.719344 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba"} err="failed to get container status \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": rpc error: code = NotFound desc = could not find container \"9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba\": container with ID starting with 9009b5132f50f1c989e3794960f1f867668491f446419295cbe2e74851c760ba not found: ID does not exist" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815086 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kdrr\" (UniqueName: \"kubernetes.io/projected/5c660b70-96a8-49fa-927e-a04a97ccf486-kube-api-access-8kdrr\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815134 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-run-httpd\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815153 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-scripts\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815209 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-config-data\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815227 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-log-httpd\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 
16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815250 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.815270 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.917070 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.917408 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.917649 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kdrr\" (UniqueName: \"kubernetes.io/projected/5c660b70-96a8-49fa-927e-a04a97ccf486-kube-api-access-8kdrr\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.917762 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-run-httpd\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.917905 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-scripts\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.918028 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.918128 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-config-data\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.918225 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-log-httpd\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.918658 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-run-httpd\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.918672 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-log-httpd\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.922000 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.925476 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.927229 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.930851 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-config-data\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.938660 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-scripts\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.943383 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kdrr\" (UniqueName: \"kubernetes.io/projected/5c660b70-96a8-49fa-927e-a04a97ccf486-kube-api-access-8kdrr\") pod \"ceilometer-0\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:13 crc kubenswrapper[4813]: I0320 16:18:13.984202 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:14 crc kubenswrapper[4813]: I0320 16:18:14.462895 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:14 crc kubenswrapper[4813]: I0320 16:18:14.575956 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerStarted","Data":"523d25901daf7d82b5fe001cb77c759b4ff54f5192e8a7686761ff257428175f"} Mar 20 16:18:14 crc kubenswrapper[4813]: I0320 16:18:14.963370 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.043359 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.141841 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwbrc\" (UniqueName: \"kubernetes.io/projected/585ae5c1-6601-4d39-a127-9a5e1abf1c54-kube-api-access-vwbrc\") pod \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.141913 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/585ae5c1-6601-4d39-a127-9a5e1abf1c54-operator-scripts\") pod \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\" (UID: \"585ae5c1-6601-4d39-a127-9a5e1abf1c54\") " Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.142093 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-operator-scripts\") pod \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.142130 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jx7c\" (UniqueName: \"kubernetes.io/projected/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-kube-api-access-7jx7c\") pod \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\" (UID: \"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca\") " Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.144039 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/585ae5c1-6601-4d39-a127-9a5e1abf1c54-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "585ae5c1-6601-4d39-a127-9a5e1abf1c54" (UID: "585ae5c1-6601-4d39-a127-9a5e1abf1c54"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.144555 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" (UID: "5f8a88e5-87f3-4ada-8618-97f7f7ea28ca"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.147150 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-kube-api-access-7jx7c" (OuterVolumeSpecName: "kube-api-access-7jx7c") pod "5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" (UID: "5f8a88e5-87f3-4ada-8618-97f7f7ea28ca"). InnerVolumeSpecName "kube-api-access-7jx7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.153690 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585ae5c1-6601-4d39-a127-9a5e1abf1c54-kube-api-access-vwbrc" (OuterVolumeSpecName: "kube-api-access-vwbrc") pod "585ae5c1-6601-4d39-a127-9a5e1abf1c54" (UID: "585ae5c1-6601-4d39-a127-9a5e1abf1c54"). InnerVolumeSpecName "kube-api-access-vwbrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.243827 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.243857 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jx7c\" (UniqueName: \"kubernetes.io/projected/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca-kube-api-access-7jx7c\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.243868 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwbrc\" (UniqueName: \"kubernetes.io/projected/585ae5c1-6601-4d39-a127-9a5e1abf1c54-kube-api-access-vwbrc\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.243876 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/585ae5c1-6601-4d39-a127-9a5e1abf1c54-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.278985 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7147803c-236b-44ef-a145-b050941f8988" path="/var/lib/kubelet/pods/7147803c-236b-44ef-a145-b050941f8988/volumes" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.603504 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-4z67w" event={"ID":"585ae5c1-6601-4d39-a127-9a5e1abf1c54","Type":"ContainerDied","Data":"2e5da060dd52ed2e56244e084951d1b7597579c046530904c3a20d2f5d4f13e6"} Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.603716 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e5da060dd52ed2e56244e084951d1b7597579c046530904c3a20d2f5d4f13e6" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.603749 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-4z67w" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.605430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerStarted","Data":"2164c0f96e3b1574bcb9688f49455edca5d428009ba403c344e3c07a76e3355d"} Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.607384 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" event={"ID":"5f8a88e5-87f3-4ada-8618-97f7f7ea28ca","Type":"ContainerDied","Data":"f2d61fd5c2a1e51eb511e55013626dd5eed2163edc7ec46df49e6743db5e056e"} Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.607412 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2d61fd5c2a1e51eb511e55013626dd5eed2163edc7ec46df49e6743db5e056e" Mar 20 16:18:15 crc kubenswrapper[4813]: I0320 16:18:15.607449 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-7235-account-create-update-h27lb" Mar 20 16:18:16 crc kubenswrapper[4813]: I0320 16:18:16.625321 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerStarted","Data":"fe4d52dec7e94a335cba528228e0255a87ce66bbe0e7a45660e1adc86d64bba5"} Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.007045 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv"] Mar 20 16:18:17 crc kubenswrapper[4813]: E0320 16:18:17.007368 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" containerName="mariadb-account-create-update" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.007388 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" containerName="mariadb-account-create-update" Mar 20 16:18:17 crc kubenswrapper[4813]: E0320 16:18:17.007428 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585ae5c1-6601-4d39-a127-9a5e1abf1c54" containerName="mariadb-database-create" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.007438 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="585ae5c1-6601-4d39-a127-9a5e1abf1c54" containerName="mariadb-database-create" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.007617 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" containerName="mariadb-account-create-update" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.007634 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="585ae5c1-6601-4d39-a127-9a5e1abf1c54" containerName="mariadb-database-create" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.008183 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.010918 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.012185 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-xc8c4" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.017666 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv"] Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.172891 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-config-data\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.173198 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tftqf\" (UniqueName: \"kubernetes.io/projected/4e617d60-d1d5-4c4e-97f7-d2cb13743714-kube-api-access-tftqf\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.173302 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-db-sync-config-data\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.173348 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.275123 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tftqf\" (UniqueName: \"kubernetes.io/projected/4e617d60-d1d5-4c4e-97f7-d2cb13743714-kube-api-access-tftqf\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.275183 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-db-sync-config-data\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.275213 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 
16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.275285 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-config-data\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.282014 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-db-sync-config-data\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.282125 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.282244 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-config-data\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.292756 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tftqf\" (UniqueName: \"kubernetes.io/projected/4e617d60-d1d5-4c4e-97f7-d2cb13743714-kube-api-access-tftqf\") pod \"watcher-kuttl-db-sync-b7cdv\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.324239 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.540789 4813 scope.go:117] "RemoveContainer" containerID="1dbac01ec63c94244bb30858d747d00b5759b1b627a744d588782563f4881b63" Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.637822 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerStarted","Data":"650c5c8dbcdbeb0d07a1ace2ead1e570a3a7a21aed8e28e03e9e771673b93a8c"} Mar 20 16:18:17 crc kubenswrapper[4813]: I0320 16:18:17.839906 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv"] Mar 20 16:18:18 crc kubenswrapper[4813]: I0320 16:18:18.646267 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" event={"ID":"4e617d60-d1d5-4c4e-97f7-d2cb13743714","Type":"ContainerStarted","Data":"180af0d8f1c13811b7eacd608c555bdd520458a09c96dab5e89aea845c2544c4"} Mar 20 16:18:18 crc kubenswrapper[4813]: I0320 16:18:18.646965 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" event={"ID":"4e617d60-d1d5-4c4e-97f7-d2cb13743714","Type":"ContainerStarted","Data":"30f4fc3350d274800695c41a41db39a4aacf8762657baef2047f6e8d9d311814"} Mar 20 16:18:18 crc kubenswrapper[4813]: I0320 16:18:18.670082 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" podStartSLOduration=2.670059865 podStartE2EDuration="2.670059865s" podCreationTimestamp="2026-03-20 16:18:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:18.663829997 +0000 UTC m=+2428.086532838" watchObservedRunningTime="2026-03-20 16:18:18.670059865 +0000 UTC m=+2428.092762706" Mar 20 16:18:19 crc kubenswrapper[4813]: I0320 16:18:19.656946 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerStarted","Data":"fae32c8b6fcc10c795902851de40d2505c597ffa4bb420cef7deaff0feb4a863"} Mar 20 16:18:19 crc kubenswrapper[4813]: I0320 16:18:19.657278 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:19 crc kubenswrapper[4813]: I0320 16:18:19.698290 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.488398887 podStartE2EDuration="6.698272591s" podCreationTimestamp="2026-03-20 16:18:13 +0000 UTC" firstStartedPulling="2026-03-20 16:18:14.463332225 +0000 UTC m=+2423.886035066" lastFinishedPulling="2026-03-20 16:18:18.673205929 +0000 UTC m=+2428.095908770" observedRunningTime="2026-03-20 16:18:19.69340145 +0000 UTC m=+2429.116104291" watchObservedRunningTime="2026-03-20 16:18:19.698272591 +0000 UTC m=+2429.120975432" Mar 20 16:18:20 crc kubenswrapper[4813]: I0320 16:18:20.666975 4813 generic.go:334] "Generic (PLEG): container finished" podID="4e617d60-d1d5-4c4e-97f7-d2cb13743714" containerID="180af0d8f1c13811b7eacd608c555bdd520458a09c96dab5e89aea845c2544c4" exitCode=0 Mar 20 16:18:20 crc kubenswrapper[4813]: I0320 16:18:20.667057 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" 
event={"ID":"4e617d60-d1d5-4c4e-97f7-d2cb13743714","Type":"ContainerDied","Data":"180af0d8f1c13811b7eacd608c555bdd520458a09c96dab5e89aea845c2544c4"} Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.021605 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.082784 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-db-sync-config-data\") pod \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.082851 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tftqf\" (UniqueName: \"kubernetes.io/projected/4e617d60-d1d5-4c4e-97f7-d2cb13743714-kube-api-access-tftqf\") pod \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.082913 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-combined-ca-bundle\") pod \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.083047 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-config-data\") pod \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\" (UID: \"4e617d60-d1d5-4c4e-97f7-d2cb13743714\") " Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.089345 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e617d60-d1d5-4c4e-97f7-d2cb13743714-kube-api-access-tftqf" (OuterVolumeSpecName: "kube-api-access-tftqf") pod "4e617d60-d1d5-4c4e-97f7-d2cb13743714" (UID: "4e617d60-d1d5-4c4e-97f7-d2cb13743714"). InnerVolumeSpecName "kube-api-access-tftqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.090325 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4e617d60-d1d5-4c4e-97f7-d2cb13743714" (UID: "4e617d60-d1d5-4c4e-97f7-d2cb13743714"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.136757 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e617d60-d1d5-4c4e-97f7-d2cb13743714" (UID: "4e617d60-d1d5-4c4e-97f7-d2cb13743714"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.148714 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-config-data" (OuterVolumeSpecName: "config-data") pod "4e617d60-d1d5-4c4e-97f7-d2cb13743714" (UID: "4e617d60-d1d5-4c4e-97f7-d2cb13743714"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.184694 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.184823 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.184986 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tftqf\" (UniqueName: \"kubernetes.io/projected/4e617d60-d1d5-4c4e-97f7-d2cb13743714-kube-api-access-tftqf\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.185047 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e617d60-d1d5-4c4e-97f7-d2cb13743714-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.704787 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" event={"ID":"4e617d60-d1d5-4c4e-97f7-d2cb13743714","Type":"ContainerDied","Data":"30f4fc3350d274800695c41a41db39a4aacf8762657baef2047f6e8d9d311814"} Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.704833 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30f4fc3350d274800695c41a41db39a4aacf8762657baef2047f6e8d9d311814" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.705237 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.994553 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:22 crc kubenswrapper[4813]: E0320 16:18:22.994869 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e617d60-d1d5-4c4e-97f7-d2cb13743714" containerName="watcher-kuttl-db-sync" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.994883 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e617d60-d1d5-4c4e-97f7-d2cb13743714" containerName="watcher-kuttl-db-sync" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.995037 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e617d60-d1d5-4c4e-97f7-d2cb13743714" containerName="watcher-kuttl-db-sync" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.995837 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:22 crc kubenswrapper[4813]: I0320 16:18:22.998209 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-xc8c4" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.002712 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.018025 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.026169 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.027218 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.033904 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.066986 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099134 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099201 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099226 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099246 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdjqz\" (UniqueName: \"kubernetes.io/projected/160c90f8-7760-4a95-9731-d50189238a45-kube-api-access-rdjqz\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099260 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr8cw\" (UniqueName: \"kubernetes.io/projected/842c12a3-6562-471d-a2b0-288852fbcb27-kube-api-access-kr8cw\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099290 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099317 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/842c12a3-6562-471d-a2b0-288852fbcb27-logs\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099345 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160c90f8-7760-4a95-9731-d50189238a45-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.099364 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.113715 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.115081 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.117108 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.129910 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201222 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdjqz\" (UniqueName: \"kubernetes.io/projected/160c90f8-7760-4a95-9731-d50189238a45-kube-api-access-rdjqz\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201263 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr8cw\" (UniqueName: \"kubernetes.io/projected/842c12a3-6562-471d-a2b0-288852fbcb27-kube-api-access-kr8cw\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201304 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201332 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" 
(UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201368 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/842c12a3-6562-471d-a2b0-288852fbcb27-logs\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201388 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201404 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160c90f8-7760-4a95-9731-d50189238a45-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201426 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201455 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201630 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201752 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201793 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwfxt\" (UniqueName: \"kubernetes.io/projected/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-kube-api-access-zwfxt\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201848 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160c90f8-7760-4a95-9731-d50189238a45-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.201915 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/842c12a3-6562-471d-a2b0-288852fbcb27-logs\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.205245 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.205616 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.205790 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.213071 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.213216 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.218786 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr8cw\" (UniqueName: \"kubernetes.io/projected/842c12a3-6562-471d-a2b0-288852fbcb27-kube-api-access-kr8cw\") pod 
\"watcher-kuttl-api-0\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.222569 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdjqz\" (UniqueName: \"kubernetes.io/projected/160c90f8-7760-4a95-9731-d50189238a45-kube-api-access-rdjqz\") pod \"watcher-kuttl-applier-0\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.302945 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.303003 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.303046 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.303082 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.303143 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwfxt\" (UniqueName: \"kubernetes.io/projected/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-kube-api-access-zwfxt\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.304101 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.307007 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.310802 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.310840 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.317965 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.330032 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwfxt\" (UniqueName: \"kubernetes.io/projected/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-kube-api-access-zwfxt\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.354937 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.434109 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.862407 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:23 crc kubenswrapper[4813]: I0320 16:18:23.978257 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.025373 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.727372 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"160c90f8-7760-4a95-9731-d50189238a45","Type":"ContainerStarted","Data":"a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.727638 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"160c90f8-7760-4a95-9731-d50189238a45","Type":"ContainerStarted","Data":"05708b60509cef4dd2161df0ebcb453be8e738080a24b64144897c0f44883de5"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.728751 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"9f2dfe82-c9f7-4124-a913-e4290f3c67f8","Type":"ContainerStarted","Data":"2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.728788 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"9f2dfe82-c9f7-4124-a913-e4290f3c67f8","Type":"ContainerStarted","Data":"8c62f55d5e2ffcf7951c8c9d6dbbed4c43fb9d0f40345cfcd15e2ec3615ff16a"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 
16:18:24.732059 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"842c12a3-6562-471d-a2b0-288852fbcb27","Type":"ContainerStarted","Data":"a6fee79b1371f8a7fde52899b3f6a0aba4505e6d0d066c5c3a4c8cb64b114821"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.732087 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"842c12a3-6562-471d-a2b0-288852fbcb27","Type":"ContainerStarted","Data":"5a9cee281f2bd67606c88eb6c07a8b5da8981de58bc2c3ef6fb7fadda12298c8"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.732102 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"842c12a3-6562-471d-a2b0-288852fbcb27","Type":"ContainerStarted","Data":"092e04fccf59a8559fa02fcf73e191aaa1646c90facd38bb21dd0db57a3dd7ae"} Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.732274 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.745765 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=1.745749071 podStartE2EDuration="1.745749071s" podCreationTimestamp="2026-03-20 16:18:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:24.743014698 +0000 UTC m=+2434.165717539" watchObservedRunningTime="2026-03-20 16:18:24.745749071 +0000 UTC m=+2434.168451912" Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.771860 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.771831535 podStartE2EDuration="2.771831535s" podCreationTimestamp="2026-03-20 16:18:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:24.763726416 +0000 UTC m=+2434.186429257" watchObservedRunningTime="2026-03-20 16:18:24.771831535 +0000 UTC m=+2434.194534376" Mar 20 16:18:24 crc kubenswrapper[4813]: I0320 16:18:24.786797 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=1.786777148 podStartE2EDuration="1.786777148s" podCreationTimestamp="2026-03-20 16:18:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:24.780136859 +0000 UTC m=+2434.202839700" watchObservedRunningTime="2026-03-20 16:18:24.786777148 +0000 UTC m=+2434.209479989" Mar 20 16:18:27 crc kubenswrapper[4813]: I0320 16:18:27.012468 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:28 crc kubenswrapper[4813]: I0320 16:18:28.311753 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:28 crc kubenswrapper[4813]: I0320 16:18:28.355427 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.311841 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.318436 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.356017 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.385336 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.435543 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.462523 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.829821 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.837069 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.842344 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.842608 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.853474 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:33 crc kubenswrapper[4813]: I0320 16:18:33.860795 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.135891 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.136523 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-central-agent" containerID="cri-o://2164c0f96e3b1574bcb9688f49455edca5d428009ba403c344e3c07a76e3355d" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.143624 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-notification-agent" containerID="cri-o://fe4d52dec7e94a335cba528228e0255a87ce66bbe0e7a45660e1adc86d64bba5" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.143633 4813 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="watcher-kuttl-default/ceilometer-0" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="proxy-httpd" containerID="cri-o://fae32c8b6fcc10c795902851de40d2505c597ffa4bb420cef7deaff0feb4a863" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.143624 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="sg-core" containerID="cri-o://650c5c8dbcdbeb0d07a1ace2ead1e570a3a7a21aed8e28e03e9e771673b93a8c" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.170548 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.269550 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.290694 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-b7cdv"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.310864 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher7235-account-delete-nqks2"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.312936 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.349026 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkptp\" (UniqueName: \"kubernetes.io/projected/d21a34ba-4332-4efc-8f84-ec4446c8f566-kube-api-access-xkptp\") pod \"watcher7235-account-delete-nqks2\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.349182 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21a34ba-4332-4efc-8f84-ec4446c8f566-operator-scripts\") pod \"watcher7235-account-delete-nqks2\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.353647 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher7235-account-delete-nqks2"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.396521 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.397145 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="160c90f8-7760-4a95-9731-d50189238a45" containerName="watcher-applier" containerID="cri-o://a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.421323 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.450474 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21a34ba-4332-4efc-8f84-ec4446c8f566-operator-scripts\") pod 
\"watcher7235-account-delete-nqks2\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.450605 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkptp\" (UniqueName: \"kubernetes.io/projected/d21a34ba-4332-4efc-8f84-ec4446c8f566-kube-api-access-xkptp\") pod \"watcher7235-account-delete-nqks2\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.451421 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21a34ba-4332-4efc-8f84-ec4446c8f566-operator-scripts\") pod \"watcher7235-account-delete-nqks2\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.464137 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.466537 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-kuttl-api-log" containerID="cri-o://5a9cee281f2bd67606c88eb6c07a8b5da8981de58bc2c3ef6fb7fadda12298c8" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.466614 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-api" containerID="cri-o://a6fee79b1371f8a7fde52899b3f6a0aba4505e6d0d066c5c3a4c8cb64b114821" gracePeriod=30 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.504252 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkptp\" (UniqueName: \"kubernetes.io/projected/d21a34ba-4332-4efc-8f84-ec4446c8f566-kube-api-access-xkptp\") pod \"watcher7235-account-delete-nqks2\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.668044 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.873593 4813 generic.go:334] "Generic (PLEG): container finished" podID="842c12a3-6562-471d-a2b0-288852fbcb27" containerID="5a9cee281f2bd67606c88eb6c07a8b5da8981de58bc2c3ef6fb7fadda12298c8" exitCode=143 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.873917 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"842c12a3-6562-471d-a2b0-288852fbcb27","Type":"ContainerDied","Data":"5a9cee281f2bd67606c88eb6c07a8b5da8981de58bc2c3ef6fb7fadda12298c8"} Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876400 4813 generic.go:334] "Generic (PLEG): container finished" podID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerID="fae32c8b6fcc10c795902851de40d2505c597ffa4bb420cef7deaff0feb4a863" exitCode=0 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876425 4813 generic.go:334] "Generic (PLEG): container finished" podID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerID="650c5c8dbcdbeb0d07a1ace2ead1e570a3a7a21aed8e28e03e9e771673b93a8c" exitCode=2 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876432 4813 generic.go:334] "Generic (PLEG): container finished" podID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerID="2164c0f96e3b1574bcb9688f49455edca5d428009ba403c344e3c07a76e3355d" exitCode=0 Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876611 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerDied","Data":"fae32c8b6fcc10c795902851de40d2505c597ffa4bb420cef7deaff0feb4a863"} Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876667 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerDied","Data":"650c5c8dbcdbeb0d07a1ace2ead1e570a3a7a21aed8e28e03e9e771673b93a8c"} Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876683 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerDied","Data":"2164c0f96e3b1574bcb9688f49455edca5d428009ba403c344e3c07a76e3355d"} Mar 20 16:18:36 crc kubenswrapper[4813]: I0320 16:18:36.876722 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="9f2dfe82-c9f7-4124-a913-e4290f3c67f8" containerName="watcher-decision-engine" containerID="cri-o://2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39" gracePeriod=30 Mar 20 16:18:37 crc kubenswrapper[4813]: I0320 16:18:37.198632 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher7235-account-delete-nqks2"] Mar 20 16:18:37 crc kubenswrapper[4813]: W0320 16:18:37.200699 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd21a34ba_4332_4efc_8f84_ec4446c8f566.slice/crio-e9802a4bad710e948e4d3d0a1e35062b1b0f668290738d6e0a450f7c578f9e26 WatchSource:0}: Error finding container e9802a4bad710e948e4d3d0a1e35062b1b0f668290738d6e0a450f7c578f9e26: Status 404 returned error can't find the container with id e9802a4bad710e948e4d3d0a1e35062b1b0f668290738d6e0a450f7c578f9e26 Mar 20 16:18:37 crc kubenswrapper[4813]: I0320 16:18:37.278182 4813 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="4e617d60-d1d5-4c4e-97f7-d2cb13743714" path="/var/lib/kubelet/pods/4e617d60-d1d5-4c4e-97f7-d2cb13743714/volumes" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:37.930020 4813 generic.go:334] "Generic (PLEG): container finished" podID="d21a34ba-4332-4efc-8f84-ec4446c8f566" containerID="1a791b265264b88e19f353e17cf480a49c1406bb02f4c6f167f0e1838ad1ac97" exitCode=0 Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:37.930227 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" event={"ID":"d21a34ba-4332-4efc-8f84-ec4446c8f566","Type":"ContainerDied","Data":"1a791b265264b88e19f353e17cf480a49c1406bb02f4c6f167f0e1838ad1ac97"} Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:37.930381 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" event={"ID":"d21a34ba-4332-4efc-8f84-ec4446c8f566","Type":"ContainerStarted","Data":"e9802a4bad710e948e4d3d0a1e35062b1b0f668290738d6e0a450f7c578f9e26"} Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:37.967701 4813 generic.go:334] "Generic (PLEG): container finished" podID="842c12a3-6562-471d-a2b0-288852fbcb27" containerID="a6fee79b1371f8a7fde52899b3f6a0aba4505e6d0d066c5c3a4c8cb64b114821" exitCode=0 Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:37.967853 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"842c12a3-6562-471d-a2b0-288852fbcb27","Type":"ContainerDied","Data":"a6fee79b1371f8a7fde52899b3f6a0aba4505e6d0d066c5c3a4c8cb64b114821"} Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.004824 4813 generic.go:334] "Generic (PLEG): container finished" podID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerID="fe4d52dec7e94a335cba528228e0255a87ce66bbe0e7a45660e1adc86d64bba5" exitCode=0 Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.005200 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerDied","Data":"fe4d52dec7e94a335cba528228e0255a87ce66bbe0e7a45660e1adc86d64bba5"} Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.167776 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183051 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-combined-ca-bundle\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183092 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-scripts\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183129 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-config-data\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183173 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kdrr\" (UniqueName: \"kubernetes.io/projected/5c660b70-96a8-49fa-927e-a04a97ccf486-kube-api-access-8kdrr\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183222 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-log-httpd\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183256 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-ceilometer-tls-certs\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183285 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-run-httpd\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.183370 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-sg-core-conf-yaml\") pod \"5c660b70-96a8-49fa-927e-a04a97ccf486\" (UID: \"5c660b70-96a8-49fa-927e-a04a97ccf486\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.185440 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.185772 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.193513 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c660b70-96a8-49fa-927e-a04a97ccf486-kube-api-access-8kdrr" (OuterVolumeSpecName: "kube-api-access-8kdrr") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "kube-api-access-8kdrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.203264 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-scripts" (OuterVolumeSpecName: "scripts") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.212119 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.253598 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.274911 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284835 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284856 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284866 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284874 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kdrr\" (UniqueName: \"kubernetes.io/projected/5c660b70-96a8-49fa-927e-a04a97ccf486-kube-api-access-8kdrr\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284882 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284891 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.284898 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c660b70-96a8-49fa-927e-a04a97ccf486-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.311667 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-config-data" (OuterVolumeSpecName: "config-data") pod "5c660b70-96a8-49fa-927e-a04a97ccf486" (UID: "5c660b70-96a8-49fa-927e-a04a97ccf486"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.311702 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.170:9322/\": dial tcp 10.217.0.170:9322: connect: connection refused" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.311768 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.170:9322/\": dial tcp 10.217.0.170:9322: connect: connection refused" Mar 20 16:18:38 crc kubenswrapper[4813]: E0320 16:18:38.358629 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:18:38 crc kubenswrapper[4813]: E0320 16:18:38.360079 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:18:38 crc kubenswrapper[4813]: E0320 16:18:38.361163 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:18:38 crc kubenswrapper[4813]: E0320 16:18:38.361194 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="160c90f8-7760-4a95-9731-d50189238a45" containerName="watcher-applier" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.386519 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c660b70-96a8-49fa-927e-a04a97ccf486-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.656227 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.696326 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr8cw\" (UniqueName: \"kubernetes.io/projected/842c12a3-6562-471d-a2b0-288852fbcb27-kube-api-access-kr8cw\") pod \"842c12a3-6562-471d-a2b0-288852fbcb27\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.696549 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/842c12a3-6562-471d-a2b0-288852fbcb27-logs\") pod \"842c12a3-6562-471d-a2b0-288852fbcb27\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.696593 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-config-data\") pod \"842c12a3-6562-471d-a2b0-288852fbcb27\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.696635 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-custom-prometheus-ca\") pod \"842c12a3-6562-471d-a2b0-288852fbcb27\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.696665 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-combined-ca-bundle\") pod \"842c12a3-6562-471d-a2b0-288852fbcb27\" (UID: \"842c12a3-6562-471d-a2b0-288852fbcb27\") " Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.698691 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/842c12a3-6562-471d-a2b0-288852fbcb27-logs" (OuterVolumeSpecName: "logs") pod "842c12a3-6562-471d-a2b0-288852fbcb27" (UID: "842c12a3-6562-471d-a2b0-288852fbcb27"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.714836 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/842c12a3-6562-471d-a2b0-288852fbcb27-kube-api-access-kr8cw" (OuterVolumeSpecName: "kube-api-access-kr8cw") pod "842c12a3-6562-471d-a2b0-288852fbcb27" (UID: "842c12a3-6562-471d-a2b0-288852fbcb27"). InnerVolumeSpecName "kube-api-access-kr8cw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.721517 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "842c12a3-6562-471d-a2b0-288852fbcb27" (UID: "842c12a3-6562-471d-a2b0-288852fbcb27"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.722610 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "842c12a3-6562-471d-a2b0-288852fbcb27" (UID: "842c12a3-6562-471d-a2b0-288852fbcb27"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.738398 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-config-data" (OuterVolumeSpecName: "config-data") pod "842c12a3-6562-471d-a2b0-288852fbcb27" (UID: "842c12a3-6562-471d-a2b0-288852fbcb27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.798765 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/842c12a3-6562-471d-a2b0-288852fbcb27-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.799003 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.799017 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.799027 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/842c12a3-6562-471d-a2b0-288852fbcb27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:38 crc kubenswrapper[4813]: I0320 16:18:38.799036 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr8cw\" (UniqueName: \"kubernetes.io/projected/842c12a3-6562-471d-a2b0-288852fbcb27-kube-api-access-kr8cw\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.014681 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"842c12a3-6562-471d-a2b0-288852fbcb27","Type":"ContainerDied","Data":"092e04fccf59a8559fa02fcf73e191aaa1646c90facd38bb21dd0db57a3dd7ae"} Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.014790 4813 scope.go:117] "RemoveContainer" containerID="a6fee79b1371f8a7fde52899b3f6a0aba4505e6d0d066c5c3a4c8cb64b114821" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.015000 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.017528 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"5c660b70-96a8-49fa-927e-a04a97ccf486","Type":"ContainerDied","Data":"523d25901daf7d82b5fe001cb77c759b4ff54f5192e8a7686761ff257428175f"} Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.017708 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.063628 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.077859 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.087172 4813 scope.go:117] "RemoveContainer" containerID="5a9cee281f2bd67606c88eb6c07a8b5da8981de58bc2c3ef6fb7fadda12298c8" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.093124 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.135771 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150330 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: E0320 16:18:39.150730 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-kuttl-api-log" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150751 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-kuttl-api-log" Mar 20 16:18:39 crc kubenswrapper[4813]: E0320 16:18:39.150770 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-central-agent" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150779 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-central-agent" Mar 20 16:18:39 crc kubenswrapper[4813]: E0320 16:18:39.150791 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-notification-agent" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150798 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-notification-agent" Mar 20 16:18:39 crc kubenswrapper[4813]: E0320 16:18:39.150811 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-api" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150818 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-api" Mar 20 16:18:39 crc kubenswrapper[4813]: E0320 16:18:39.150836 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="sg-core" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150842 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="sg-core" Mar 20 16:18:39 crc kubenswrapper[4813]: E0320 16:18:39.150862 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="proxy-httpd" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.150868 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="proxy-httpd" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.151026 4813 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-notification-agent" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.151046 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="ceilometer-central-agent" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.151064 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="proxy-httpd" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.151074 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-kuttl-api-log" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.151085 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" containerName="sg-core" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.151097 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" containerName="watcher-api" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.152662 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.155886 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.155997 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.156401 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.158227 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.158892 4813 scope.go:117] "RemoveContainer" containerID="fae32c8b6fcc10c795902851de40d2505c597ffa4bb420cef7deaff0feb4a863" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.187709 4813 scope.go:117] "RemoveContainer" containerID="650c5c8dbcdbeb0d07a1ace2ead1e570a3a7a21aed8e28e03e9e771673b93a8c" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205648 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205694 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-log-httpd\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205715 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 
crc kubenswrapper[4813]: I0320 16:18:39.205732 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205756 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-config-data\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205793 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-run-httpd\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205818 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt57r\" (UniqueName: \"kubernetes.io/projected/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-kube-api-access-pt57r\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.205844 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-scripts\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.214850 4813 scope.go:117] "RemoveContainer" containerID="fe4d52dec7e94a335cba528228e0255a87ce66bbe0e7a45660e1adc86d64bba5" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.234162 4813 scope.go:117] "RemoveContainer" containerID="2164c0f96e3b1574bcb9688f49455edca5d428009ba403c344e3c07a76e3355d" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.286518 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c660b70-96a8-49fa-927e-a04a97ccf486" path="/var/lib/kubelet/pods/5c660b70-96a8-49fa-927e-a04a97ccf486/volumes" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.295367 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="842c12a3-6562-471d-a2b0-288852fbcb27" path="/var/lib/kubelet/pods/842c12a3-6562-471d-a2b0-288852fbcb27/volumes" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.307657 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-run-httpd\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.307726 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt57r\" (UniqueName: \"kubernetes.io/projected/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-kube-api-access-pt57r\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: 
I0320 16:18:39.307772 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-scripts\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.307915 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.307944 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-log-httpd\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.307963 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.307987 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.308017 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-config-data\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.310148 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-run-httpd\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.310372 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-log-httpd\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.317019 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.317065 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-scripts\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " 
pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.317677 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-config-data\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.322445 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.334848 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt57r\" (UniqueName: \"kubernetes.io/projected/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-kube-api-access-pt57r\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.336650 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.433653 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.434868 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.456349 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.512074 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21a34ba-4332-4efc-8f84-ec4446c8f566-operator-scripts\") pod \"d21a34ba-4332-4efc-8f84-ec4446c8f566\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.512247 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkptp\" (UniqueName: \"kubernetes.io/projected/d21a34ba-4332-4efc-8f84-ec4446c8f566-kube-api-access-xkptp\") pod \"d21a34ba-4332-4efc-8f84-ec4446c8f566\" (UID: \"d21a34ba-4332-4efc-8f84-ec4446c8f566\") " Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.512839 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d21a34ba-4332-4efc-8f84-ec4446c8f566-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d21a34ba-4332-4efc-8f84-ec4446c8f566" (UID: "d21a34ba-4332-4efc-8f84-ec4446c8f566"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.516775 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d21a34ba-4332-4efc-8f84-ec4446c8f566-kube-api-access-xkptp" (OuterVolumeSpecName: "kube-api-access-xkptp") pod "d21a34ba-4332-4efc-8f84-ec4446c8f566" (UID: "d21a34ba-4332-4efc-8f84-ec4446c8f566"). InnerVolumeSpecName "kube-api-access-xkptp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.614800 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkptp\" (UniqueName: \"kubernetes.io/projected/d21a34ba-4332-4efc-8f84-ec4446c8f566-kube-api-access-xkptp\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.614845 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21a34ba-4332-4efc-8f84-ec4446c8f566-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:39 crc kubenswrapper[4813]: I0320 16:18:39.884914 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.028692 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerStarted","Data":"21c1104baa1474c16756913853bbc7e5c27aea7f7cc75cfa9289b62791c97fc0"} Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.029964 4813 generic.go:334] "Generic (PLEG): container finished" podID="160c90f8-7760-4a95-9731-d50189238a45" containerID="a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3" exitCode=0 Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.030018 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"160c90f8-7760-4a95-9731-d50189238a45","Type":"ContainerDied","Data":"a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3"} Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.031542 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" event={"ID":"d21a34ba-4332-4efc-8f84-ec4446c8f566","Type":"ContainerDied","Data":"e9802a4bad710e948e4d3d0a1e35062b1b0f668290738d6e0a450f7c578f9e26"} Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.031579 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9802a4bad710e948e4d3d0a1e35062b1b0f668290738d6e0a450f7c578f9e26" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.031597 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher7235-account-delete-nqks2" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.362843 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.540835 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-combined-ca-bundle\") pod \"160c90f8-7760-4a95-9731-d50189238a45\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.540873 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-config-data\") pod \"160c90f8-7760-4a95-9731-d50189238a45\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.540967 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160c90f8-7760-4a95-9731-d50189238a45-logs\") pod \"160c90f8-7760-4a95-9731-d50189238a45\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.540989 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdjqz\" (UniqueName: \"kubernetes.io/projected/160c90f8-7760-4a95-9731-d50189238a45-kube-api-access-rdjqz\") pod \"160c90f8-7760-4a95-9731-d50189238a45\" (UID: \"160c90f8-7760-4a95-9731-d50189238a45\") " Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.541854 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/160c90f8-7760-4a95-9731-d50189238a45-logs" (OuterVolumeSpecName: "logs") pod "160c90f8-7760-4a95-9731-d50189238a45" (UID: "160c90f8-7760-4a95-9731-d50189238a45"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.547733 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/160c90f8-7760-4a95-9731-d50189238a45-kube-api-access-rdjqz" (OuterVolumeSpecName: "kube-api-access-rdjqz") pod "160c90f8-7760-4a95-9731-d50189238a45" (UID: "160c90f8-7760-4a95-9731-d50189238a45"). InnerVolumeSpecName "kube-api-access-rdjqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.572532 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "160c90f8-7760-4a95-9731-d50189238a45" (UID: "160c90f8-7760-4a95-9731-d50189238a45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.601542 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-config-data" (OuterVolumeSpecName: "config-data") pod "160c90f8-7760-4a95-9731-d50189238a45" (UID: "160c90f8-7760-4a95-9731-d50189238a45"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.643882 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/160c90f8-7760-4a95-9731-d50189238a45-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.643916 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdjqz\" (UniqueName: \"kubernetes.io/projected/160c90f8-7760-4a95-9731-d50189238a45-kube-api-access-rdjqz\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.643927 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:40 crc kubenswrapper[4813]: I0320 16:18:40.643935 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/160c90f8-7760-4a95-9731-d50189238a45-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.042593 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerStarted","Data":"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7"} Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.045826 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"160c90f8-7760-4a95-9731-d50189238a45","Type":"ContainerDied","Data":"05708b60509cef4dd2161df0ebcb453be8e738080a24b64144897c0f44883de5"} Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.045875 4813 scope.go:117] "RemoveContainer" containerID="a0bcd8c47ed9d336ecacdf7fb3025f6b5a43f865635ffced2b68da6a720b3af3" Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.045978 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.098976 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.113215 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.274080 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="160c90f8-7760-4a95-9731-d50189238a45" path="/var/lib/kubelet/pods/160c90f8-7760-4a95-9731-d50189238a45/volumes" Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.340519 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-4z67w"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.347958 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-4z67w"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.359182 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher7235-account-delete-nqks2"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.367188 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-7235-account-create-update-h27lb"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.373934 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher7235-account-delete-nqks2"] Mar 20 16:18:41 crc kubenswrapper[4813]: I0320 16:18:41.381896 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-7235-account-create-update-h27lb"] Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.041046 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.060015 4813 generic.go:334] "Generic (PLEG): container finished" podID="9f2dfe82-c9f7-4124-a913-e4290f3c67f8" containerID="2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39" exitCode=0 Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.060099 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"9f2dfe82-c9f7-4124-a913-e4290f3c67f8","Type":"ContainerDied","Data":"2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39"} Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.060131 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"9f2dfe82-c9f7-4124-a913-e4290f3c67f8","Type":"ContainerDied","Data":"8c62f55d5e2ffcf7951c8c9d6dbbed4c43fb9d0f40345cfcd15e2ec3615ff16a"} Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.060153 4813 scope.go:117] "RemoveContainer" containerID="2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.060277 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.068848 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerStarted","Data":"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c"} Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.106021 4813 scope.go:117] "RemoveContainer" containerID="2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39" Mar 20 16:18:42 crc kubenswrapper[4813]: E0320 16:18:42.106959 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39\": container with ID starting with 2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39 not found: ID does not exist" containerID="2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.107017 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39"} err="failed to get container status \"2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39\": rpc error: code = NotFound desc = could not find container \"2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39\": container with ID starting with 2541f6b36e89d89221de61fcec6dcfcd01d844fcdfe1d60f4040df2a46c28c39 not found: ID does not exist" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.171364 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-logs\") pod \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.171981 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-logs" (OuterVolumeSpecName: "logs") pod "9f2dfe82-c9f7-4124-a913-e4290f3c67f8" (UID: "9f2dfe82-c9f7-4124-a913-e4290f3c67f8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.172036 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-custom-prometheus-ca\") pod \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.172066 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-config-data\") pod \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.172582 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwfxt\" (UniqueName: \"kubernetes.io/projected/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-kube-api-access-zwfxt\") pod \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.172615 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-combined-ca-bundle\") pod \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\" (UID: \"9f2dfe82-c9f7-4124-a913-e4290f3c67f8\") " Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.172817 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.178685 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-kube-api-access-zwfxt" (OuterVolumeSpecName: "kube-api-access-zwfxt") pod "9f2dfe82-c9f7-4124-a913-e4290f3c67f8" (UID: "9f2dfe82-c9f7-4124-a913-e4290f3c67f8"). InnerVolumeSpecName "kube-api-access-zwfxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.200643 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f2dfe82-c9f7-4124-a913-e4290f3c67f8" (UID: "9f2dfe82-c9f7-4124-a913-e4290f3c67f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.214800 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "9f2dfe82-c9f7-4124-a913-e4290f3c67f8" (UID: "9f2dfe82-c9f7-4124-a913-e4290f3c67f8"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.235807 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-config-data" (OuterVolumeSpecName: "config-data") pod "9f2dfe82-c9f7-4124-a913-e4290f3c67f8" (UID: "9f2dfe82-c9f7-4124-a913-e4290f3c67f8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.274679 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwfxt\" (UniqueName: \"kubernetes.io/projected/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-kube-api-access-zwfxt\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.274712 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.274721 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.274731 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f2dfe82-c9f7-4124-a913-e4290f3c67f8-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.398025 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:42 crc kubenswrapper[4813]: I0320 16:18:42.403773 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:42 crc kubenswrapper[4813]: E0320 16:18:42.538279 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f2dfe82_c9f7_4124_a913_e4290f3c67f8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f2dfe82_c9f7_4124_a913_e4290f3c67f8.slice/crio-8c62f55d5e2ffcf7951c8c9d6dbbed4c43fb9d0f40345cfcd15e2ec3615ff16a\": RecentStats: unable to find data in memory cache]" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.078507 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerStarted","Data":"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a"} Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177159 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-9kz7w"] Mar 20 16:18:43 crc kubenswrapper[4813]: E0320 16:18:43.177464 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f2dfe82-c9f7-4124-a913-e4290f3c67f8" containerName="watcher-decision-engine" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177494 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f2dfe82-c9f7-4124-a913-e4290f3c67f8" containerName="watcher-decision-engine" Mar 20 16:18:43 crc kubenswrapper[4813]: E0320 16:18:43.177505 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="160c90f8-7760-4a95-9731-d50189238a45" containerName="watcher-applier" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177511 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="160c90f8-7760-4a95-9731-d50189238a45" containerName="watcher-applier" Mar 20 16:18:43 crc kubenswrapper[4813]: E0320 16:18:43.177534 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d21a34ba-4332-4efc-8f84-ec4446c8f566" 
containerName="mariadb-account-delete" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177541 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d21a34ba-4332-4efc-8f84-ec4446c8f566" containerName="mariadb-account-delete" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177677 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="160c90f8-7760-4a95-9731-d50189238a45" containerName="watcher-applier" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177705 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d21a34ba-4332-4efc-8f84-ec4446c8f566" containerName="mariadb-account-delete" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.177714 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f2dfe82-c9f7-4124-a913-e4290f3c67f8" containerName="watcher-decision-engine" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.178243 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.189880 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-9kz7w"] Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.197423 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b0891f1-e2fe-40d3-bec6-5306428b0086-operator-scripts\") pod \"watcher-db-create-9kz7w\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.197573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g72l\" (UniqueName: \"kubernetes.io/projected/3b0891f1-e2fe-40d3-bec6-5306428b0086-kube-api-access-2g72l\") pod \"watcher-db-create-9kz7w\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.276098 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585ae5c1-6601-4d39-a127-9a5e1abf1c54" path="/var/lib/kubelet/pods/585ae5c1-6601-4d39-a127-9a5e1abf1c54/volumes" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.276640 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f8a88e5-87f3-4ada-8618-97f7f7ea28ca" path="/var/lib/kubelet/pods/5f8a88e5-87f3-4ada-8618-97f7f7ea28ca/volumes" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.277132 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f2dfe82-c9f7-4124-a913-e4290f3c67f8" path="/var/lib/kubelet/pods/9f2dfe82-c9f7-4124-a913-e4290f3c67f8/volumes" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.278168 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d21a34ba-4332-4efc-8f84-ec4446c8f566" path="/var/lib/kubelet/pods/d21a34ba-4332-4efc-8f84-ec4446c8f566/volumes" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.284811 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-e8de-account-create-update-zzf68"] Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.286116 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.289059 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.294509 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-e8de-account-create-update-zzf68"] Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.299042 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-operator-scripts\") pod \"watcher-e8de-account-create-update-zzf68\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.299160 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwc2r\" (UniqueName: \"kubernetes.io/projected/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-kube-api-access-fwc2r\") pod \"watcher-e8de-account-create-update-zzf68\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.299245 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b0891f1-e2fe-40d3-bec6-5306428b0086-operator-scripts\") pod \"watcher-db-create-9kz7w\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.299299 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g72l\" (UniqueName: \"kubernetes.io/projected/3b0891f1-e2fe-40d3-bec6-5306428b0086-kube-api-access-2g72l\") pod \"watcher-db-create-9kz7w\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.308367 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b0891f1-e2fe-40d3-bec6-5306428b0086-operator-scripts\") pod \"watcher-db-create-9kz7w\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.343467 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g72l\" (UniqueName: \"kubernetes.io/projected/3b0891f1-e2fe-40d3-bec6-5306428b0086-kube-api-access-2g72l\") pod \"watcher-db-create-9kz7w\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.400464 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-operator-scripts\") pod \"watcher-e8de-account-create-update-zzf68\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.400568 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwc2r\" 
(UniqueName: \"kubernetes.io/projected/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-kube-api-access-fwc2r\") pod \"watcher-e8de-account-create-update-zzf68\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.401279 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-operator-scripts\") pod \"watcher-e8de-account-create-update-zzf68\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.418933 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwc2r\" (UniqueName: \"kubernetes.io/projected/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-kube-api-access-fwc2r\") pod \"watcher-e8de-account-create-update-zzf68\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.492870 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:43 crc kubenswrapper[4813]: I0320 16:18:43.609402 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:44 crc kubenswrapper[4813]: I0320 16:18:44.005294 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-9kz7w"] Mar 20 16:18:44 crc kubenswrapper[4813]: I0320 16:18:44.114194 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-9kz7w" event={"ID":"3b0891f1-e2fe-40d3-bec6-5306428b0086","Type":"ContainerStarted","Data":"cfea68372358ab1cc7a31722cd53cd2c784e96bd9817bd462b04cdaf5fab2ab1"} Mar 20 16:18:44 crc kubenswrapper[4813]: I0320 16:18:44.119823 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-e8de-account-create-update-zzf68"] Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.125833 4813 generic.go:334] "Generic (PLEG): container finished" podID="3b0891f1-e2fe-40d3-bec6-5306428b0086" containerID="622b484c30f222f6a9884bbd7c89e92281f9830334c462ae152c4f611fa8a555" exitCode=0 Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.125942 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-9kz7w" event={"ID":"3b0891f1-e2fe-40d3-bec6-5306428b0086","Type":"ContainerDied","Data":"622b484c30f222f6a9884bbd7c89e92281f9830334c462ae152c4f611fa8a555"} Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.130312 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerStarted","Data":"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73"} Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.130629 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="proxy-httpd" containerID="cri-o://99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" gracePeriod=30 Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.130673 4813 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-central-agent" containerID="cri-o://78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" gracePeriod=30 Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.130678 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="sg-core" containerID="cri-o://9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" gracePeriod=30 Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.130693 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.130694 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-notification-agent" containerID="cri-o://dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" gracePeriod=30 Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.134539 4813 generic.go:334] "Generic (PLEG): container finished" podID="c7e7f714-64b2-4d9c-856d-aaefe3b118cc" containerID="e0e53fb9e0d5566adcb24edd21113bcce663c4b8d3aba2c217cf3599f94e0129" exitCode=0 Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.134623 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" event={"ID":"c7e7f714-64b2-4d9c-856d-aaefe3b118cc","Type":"ContainerDied","Data":"e0e53fb9e0d5566adcb24edd21113bcce663c4b8d3aba2c217cf3599f94e0129"} Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.134670 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" event={"ID":"c7e7f714-64b2-4d9c-856d-aaefe3b118cc","Type":"ContainerStarted","Data":"bafc6070e433a1cb96aae73af6f9afe4795540bac2a0d7efdedcbeeedc7e2294"} Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.189861 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.017718633 podStartE2EDuration="6.18983915s" podCreationTimestamp="2026-03-20 16:18:39 +0000 UTC" firstStartedPulling="2026-03-20 16:18:39.891521152 +0000 UTC m=+2449.314224003" lastFinishedPulling="2026-03-20 16:18:44.063641679 +0000 UTC m=+2453.486344520" observedRunningTime="2026-03-20 16:18:45.185168674 +0000 UTC m=+2454.607871525" watchObservedRunningTime="2026-03-20 16:18:45.18983915 +0000 UTC m=+2454.612542001" Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.876642 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975136 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pt57r\" (UniqueName: \"kubernetes.io/projected/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-kube-api-access-pt57r\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975187 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-run-httpd\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975243 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-config-data\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975313 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-log-httpd\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975332 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-ceilometer-tls-certs\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975381 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-scripts\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975414 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-combined-ca-bundle\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975431 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-sg-core-conf-yaml\") pod \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\" (UID: \"f153c2ca-e44b-4702-ac34-dd1b9b3111b4\") " Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.975788 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.976026 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.980787 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-kube-api-access-pt57r" (OuterVolumeSpecName: "kube-api-access-pt57r") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "kube-api-access-pt57r". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:45 crc kubenswrapper[4813]: I0320 16:18:45.985912 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-scripts" (OuterVolumeSpecName: "scripts") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.011326 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.040609 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.044370 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076767 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076836 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076847 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076855 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076863 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076871 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pt57r\" (UniqueName: \"kubernetes.io/projected/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-kube-api-access-pt57r\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.076880 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.085236 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-config-data" (OuterVolumeSpecName: "config-data") pod "f153c2ca-e44b-4702-ac34-dd1b9b3111b4" (UID: "f153c2ca-e44b-4702-ac34-dd1b9b3111b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150176 4813 generic.go:334] "Generic (PLEG): container finished" podID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" exitCode=0 Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150208 4813 generic.go:334] "Generic (PLEG): container finished" podID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" exitCode=2 Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150215 4813 generic.go:334] "Generic (PLEG): container finished" podID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" exitCode=0 Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150222 4813 generic.go:334] "Generic (PLEG): container finished" podID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" exitCode=0 Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150226 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150281 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerDied","Data":"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73"} Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150341 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerDied","Data":"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a"} Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150357 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerDied","Data":"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c"} Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150372 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerDied","Data":"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7"} Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150384 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"f153c2ca-e44b-4702-ac34-dd1b9b3111b4","Type":"ContainerDied","Data":"21c1104baa1474c16756913853bbc7e5c27aea7f7cc75cfa9289b62791c97fc0"} Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.150404 4813 scope.go:117] "RemoveContainer" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.182329 4813 scope.go:117] "RemoveContainer" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.183420 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f153c2ca-e44b-4702-ac34-dd1b9b3111b4-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.207971 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.212920 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.224236 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.224573 4813 scope.go:117] "RemoveContainer" containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.224615 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-central-agent" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.224630 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-central-agent" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.224659 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="proxy-httpd" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 
16:18:46.224666 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="proxy-httpd" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.224674 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-notification-agent" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.224683 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-notification-agent" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.224702 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="sg-core" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.224708 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="sg-core" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.226961 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="sg-core" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.226983 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="proxy-httpd" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.226996 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-central-agent" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.227014 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" containerName="ceilometer-notification-agent" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.228361 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.230158 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.230395 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.231042 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.241171 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.252465 4813 scope.go:117] "RemoveContainer" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.284789 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-scripts\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.285674 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4b95\" (UniqueName: \"kubernetes.io/projected/4326342e-12ee-42a9-ba93-4f0e49b8b45c-kube-api-access-h4b95\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.285706 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-config-data\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.285770 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.285796 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.286014 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-log-httpd\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.286145 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-ceilometer-tls-certs\") pod 
\"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.286192 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-run-httpd\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.298914 4813 scope.go:117] "RemoveContainer" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.300592 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": container with ID starting with 99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73 not found: ID does not exist" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.300633 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73"} err="failed to get container status \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": rpc error: code = NotFound desc = could not find container \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": container with ID starting with 99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.300658 4813 scope.go:117] "RemoveContainer" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.305633 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": container with ID starting with 9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a not found: ID does not exist" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.305673 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a"} err="failed to get container status \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": rpc error: code = NotFound desc = could not find container \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": container with ID starting with 9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.305700 4813 scope.go:117] "RemoveContainer" containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.311910 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": container with ID starting with dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c not found: ID does not exist" 
containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.311954 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c"} err="failed to get container status \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": rpc error: code = NotFound desc = could not find container \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": container with ID starting with dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.311980 4813 scope.go:117] "RemoveContainer" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" Mar 20 16:18:46 crc kubenswrapper[4813]: E0320 16:18:46.323795 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": container with ID starting with 78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7 not found: ID does not exist" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.323857 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7"} err="failed to get container status \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": rpc error: code = NotFound desc = could not find container \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": container with ID starting with 78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.323897 4813 scope.go:117] "RemoveContainer" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.326290 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73"} err="failed to get container status \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": rpc error: code = NotFound desc = could not find container \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": container with ID starting with 99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.326328 4813 scope.go:117] "RemoveContainer" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.326715 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a"} err="failed to get container status \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": rpc error: code = NotFound desc = could not find container \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": container with ID starting with 9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.326744 4813 scope.go:117] "RemoveContainer" 
containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327064 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c"} err="failed to get container status \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": rpc error: code = NotFound desc = could not find container \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": container with ID starting with dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327086 4813 scope.go:117] "RemoveContainer" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327285 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7"} err="failed to get container status \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": rpc error: code = NotFound desc = could not find container \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": container with ID starting with 78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327307 4813 scope.go:117] "RemoveContainer" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327529 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73"} err="failed to get container status \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": rpc error: code = NotFound desc = could not find container \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": container with ID starting with 99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327551 4813 scope.go:117] "RemoveContainer" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327799 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a"} err="failed to get container status \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": rpc error: code = NotFound desc = could not find container \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": container with ID starting with 9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.327822 4813 scope.go:117] "RemoveContainer" containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328059 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c"} err="failed to get container status \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": rpc error: code = NotFound desc = could not find 
container \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": container with ID starting with dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328082 4813 scope.go:117] "RemoveContainer" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328263 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7"} err="failed to get container status \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": rpc error: code = NotFound desc = could not find container \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": container with ID starting with 78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328285 4813 scope.go:117] "RemoveContainer" containerID="99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328448 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73"} err="failed to get container status \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": rpc error: code = NotFound desc = could not find container \"99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73\": container with ID starting with 99e384485e729b160b43093113b4389b1188edfa016018b9a29a71cd6639fc73 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328468 4813 scope.go:117] "RemoveContainer" containerID="9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328643 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a"} err="failed to get container status \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": rpc error: code = NotFound desc = could not find container \"9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a\": container with ID starting with 9a270cc1da25b6516486de7cded5222c105f9af79784d13307797043bd2dda2a not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328665 4813 scope.go:117] "RemoveContainer" containerID="dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328831 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c"} err="failed to get container status \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": rpc error: code = NotFound desc = could not find container \"dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c\": container with ID starting with dcbed1a9097c5f64b65cd743d61498d540ac3949eb9ab1e555c2ea5124bac22c not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.328855 4813 scope.go:117] "RemoveContainer" containerID="78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.329172 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7"} err="failed to get container status \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": rpc error: code = NotFound desc = could not find container \"78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7\": container with ID starting with 78ad0c0e5d93aea8920c5d59edc39c9047576d47503f224ae0e8316ca28934c7 not found: ID does not exist" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388278 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-log-httpd\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388363 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388396 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-run-httpd\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388432 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-scripts\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388505 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4b95\" (UniqueName: \"kubernetes.io/projected/4326342e-12ee-42a9-ba93-4f0e49b8b45c-kube-api-access-h4b95\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388529 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-config-data\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388571 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.389224 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.388982 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-log-httpd\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.389167 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-run-httpd\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.392172 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.393369 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-config-data\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.399281 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-scripts\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.399302 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.399327 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.404130 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4b95\" (UniqueName: \"kubernetes.io/projected/4326342e-12ee-42a9-ba93-4f0e49b8b45c-kube-api-access-h4b95\") pod \"ceilometer-0\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.546724 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.678376 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.685768 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.796855 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-operator-scripts\") pod \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.797329 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b0891f1-e2fe-40d3-bec6-5306428b0086-operator-scripts\") pod \"3b0891f1-e2fe-40d3-bec6-5306428b0086\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.797385 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwc2r\" (UniqueName: \"kubernetes.io/projected/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-kube-api-access-fwc2r\") pod \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\" (UID: \"c7e7f714-64b2-4d9c-856d-aaefe3b118cc\") " Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.797459 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g72l\" (UniqueName: \"kubernetes.io/projected/3b0891f1-e2fe-40d3-bec6-5306428b0086-kube-api-access-2g72l\") pod \"3b0891f1-e2fe-40d3-bec6-5306428b0086\" (UID: \"3b0891f1-e2fe-40d3-bec6-5306428b0086\") " Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.797636 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c7e7f714-64b2-4d9c-856d-aaefe3b118cc" (UID: "c7e7f714-64b2-4d9c-856d-aaefe3b118cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.798127 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.798146 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b0891f1-e2fe-40d3-bec6-5306428b0086-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b0891f1-e2fe-40d3-bec6-5306428b0086" (UID: "3b0891f1-e2fe-40d3-bec6-5306428b0086"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.801864 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b0891f1-e2fe-40d3-bec6-5306428b0086-kube-api-access-2g72l" (OuterVolumeSpecName: "kube-api-access-2g72l") pod "3b0891f1-e2fe-40d3-bec6-5306428b0086" (UID: "3b0891f1-e2fe-40d3-bec6-5306428b0086"). InnerVolumeSpecName "kube-api-access-2g72l". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.804618 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-kube-api-access-fwc2r" (OuterVolumeSpecName: "kube-api-access-fwc2r") pod "c7e7f714-64b2-4d9c-856d-aaefe3b118cc" (UID: "c7e7f714-64b2-4d9c-856d-aaefe3b118cc"). 
InnerVolumeSpecName "kube-api-access-fwc2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.899333 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b0891f1-e2fe-40d3-bec6-5306428b0086-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.899382 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwc2r\" (UniqueName: \"kubernetes.io/projected/c7e7f714-64b2-4d9c-856d-aaefe3b118cc-kube-api-access-fwc2r\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:46 crc kubenswrapper[4813]: I0320 16:18:46.899397 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g72l\" (UniqueName: \"kubernetes.io/projected/3b0891f1-e2fe-40d3-bec6-5306428b0086-kube-api-access-2g72l\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.054996 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.173912 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerStarted","Data":"33fc497cb53a50e9d722dbd7b953220d3d025459668866e243ce823acddd862f"} Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.175582 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" event={"ID":"c7e7f714-64b2-4d9c-856d-aaefe3b118cc","Type":"ContainerDied","Data":"bafc6070e433a1cb96aae73af6f9afe4795540bac2a0d7efdedcbeeedc7e2294"} Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.175598 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-e8de-account-create-update-zzf68" Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.175615 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bafc6070e433a1cb96aae73af6f9afe4795540bac2a0d7efdedcbeeedc7e2294" Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.176977 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-9kz7w" event={"ID":"3b0891f1-e2fe-40d3-bec6-5306428b0086","Type":"ContainerDied","Data":"cfea68372358ab1cc7a31722cd53cd2c784e96bd9817bd462b04cdaf5fab2ab1"} Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.177004 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfea68372358ab1cc7a31722cd53cd2c784e96bd9817bd462b04cdaf5fab2ab1" Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.177068 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-9kz7w" Mar 20 16:18:47 crc kubenswrapper[4813]: I0320 16:18:47.274235 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f153c2ca-e44b-4702-ac34-dd1b9b3111b4" path="/var/lib/kubelet/pods/f153c2ca-e44b-4702-ac34-dd1b9b3111b4/volumes" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.190307 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerStarted","Data":"cabe83bd2fda0d47a0c1a3df7bc83891e18c670fabdd8c0d4c7d945e7dbfe9ad"} Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.662656 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f"] Mar 20 16:18:48 crc kubenswrapper[4813]: E0320 16:18:48.663070 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b0891f1-e2fe-40d3-bec6-5306428b0086" containerName="mariadb-database-create" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.663147 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b0891f1-e2fe-40d3-bec6-5306428b0086" containerName="mariadb-database-create" Mar 20 16:18:48 crc kubenswrapper[4813]: E0320 16:18:48.663215 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e7f714-64b2-4d9c-856d-aaefe3b118cc" containerName="mariadb-account-create-update" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.663262 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e7f714-64b2-4d9c-856d-aaefe3b118cc" containerName="mariadb-account-create-update" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.663505 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e7f714-64b2-4d9c-856d-aaefe3b118cc" containerName="mariadb-account-create-update" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.663573 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b0891f1-e2fe-40d3-bec6-5306428b0086" containerName="mariadb-database-create" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.664178 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.669045 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.669280 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-bkmps" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.670805 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f"] Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.741216 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-config-data\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.741612 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.741663 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-db-sync-config-data\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.741691 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtv9v\" (UniqueName: \"kubernetes.io/projected/4b313902-ea68-4b39-87c5-0001aa4005a9-kube-api-access-xtv9v\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.843429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-config-data\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.843516 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.843762 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-db-sync-config-data\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc 
kubenswrapper[4813]: I0320 16:18:48.843804 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtv9v\" (UniqueName: \"kubernetes.io/projected/4b313902-ea68-4b39-87c5-0001aa4005a9-kube-api-access-xtv9v\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.848969 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-db-sync-config-data\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.849119 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.850197 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-config-data\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.868899 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtv9v\" (UniqueName: \"kubernetes.io/projected/4b313902-ea68-4b39-87c5-0001aa4005a9-kube-api-access-xtv9v\") pod \"watcher-kuttl-db-sync-2fr4f\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:48 crc kubenswrapper[4813]: I0320 16:18:48.991133 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:49 crc kubenswrapper[4813]: I0320 16:18:49.231096 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerStarted","Data":"608df3f0bef28af1000f91c41df94de77d4e15b34ed365ec028d4c7d4490be5d"} Mar 20 16:18:49 crc kubenswrapper[4813]: I0320 16:18:49.437355 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f"] Mar 20 16:18:49 crc kubenswrapper[4813]: W0320 16:18:49.446996 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b313902_ea68_4b39_87c5_0001aa4005a9.slice/crio-3860753e5cf1295391a277ddec283b8734219598620cebd779d9fa77f714f0e5 WatchSource:0}: Error finding container 3860753e5cf1295391a277ddec283b8734219598620cebd779d9fa77f714f0e5: Status 404 returned error can't find the container with id 3860753e5cf1295391a277ddec283b8734219598620cebd779d9fa77f714f0e5 Mar 20 16:18:50 crc kubenswrapper[4813]: I0320 16:18:50.239682 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerStarted","Data":"5719bc0a0e3cf1e3e1234af68379b8359e83d0cd8c27cd0d05065669ccbd01fa"} Mar 20 16:18:50 crc kubenswrapper[4813]: I0320 16:18:50.241078 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" event={"ID":"4b313902-ea68-4b39-87c5-0001aa4005a9","Type":"ContainerStarted","Data":"c81fa87e0e734631fdb2fb48fe57ab6dbe7cc49340583611d0c03839fa13feff"} Mar 20 16:18:50 crc kubenswrapper[4813]: I0320 16:18:50.241183 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" event={"ID":"4b313902-ea68-4b39-87c5-0001aa4005a9","Type":"ContainerStarted","Data":"3860753e5cf1295391a277ddec283b8734219598620cebd779d9fa77f714f0e5"} Mar 20 16:18:51 crc kubenswrapper[4813]: I0320 16:18:51.307752 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" podStartSLOduration=3.307735415 podStartE2EDuration="3.307735415s" podCreationTimestamp="2026-03-20 16:18:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:50.283728563 +0000 UTC m=+2459.706431414" watchObservedRunningTime="2026-03-20 16:18:51.307735415 +0000 UTC m=+2460.730438256" Mar 20 16:18:52 crc kubenswrapper[4813]: I0320 16:18:52.259895 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerStarted","Data":"88e571d463971a0d10fb26b48febf47bf33e6cfbb5ad61c523a78025ff2b0d3d"} Mar 20 16:18:52 crc kubenswrapper[4813]: I0320 16:18:52.260258 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:18:52 crc kubenswrapper[4813]: I0320 16:18:52.261693 4813 generic.go:334] "Generic (PLEG): container finished" podID="4b313902-ea68-4b39-87c5-0001aa4005a9" containerID="c81fa87e0e734631fdb2fb48fe57ab6dbe7cc49340583611d0c03839fa13feff" exitCode=0 Mar 20 16:18:52 crc kubenswrapper[4813]: I0320 16:18:52.261741 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" event={"ID":"4b313902-ea68-4b39-87c5-0001aa4005a9","Type":"ContainerDied","Data":"c81fa87e0e734631fdb2fb48fe57ab6dbe7cc49340583611d0c03839fa13feff"} Mar 20 16:18:52 crc kubenswrapper[4813]: I0320 16:18:52.284294 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.234825221 podStartE2EDuration="6.284275698s" podCreationTimestamp="2026-03-20 16:18:46 +0000 UTC" firstStartedPulling="2026-03-20 16:18:47.045443317 +0000 UTC m=+2456.468146188" lastFinishedPulling="2026-03-20 16:18:51.094893824 +0000 UTC m=+2460.517596665" observedRunningTime="2026-03-20 16:18:52.27914628 +0000 UTC m=+2461.701849131" watchObservedRunningTime="2026-03-20 16:18:52.284275698 +0000 UTC m=+2461.706978539" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.734563 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.834441 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtv9v\" (UniqueName: \"kubernetes.io/projected/4b313902-ea68-4b39-87c5-0001aa4005a9-kube-api-access-xtv9v\") pod \"4b313902-ea68-4b39-87c5-0001aa4005a9\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.834773 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-combined-ca-bundle\") pod \"4b313902-ea68-4b39-87c5-0001aa4005a9\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.834845 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-db-sync-config-data\") pod \"4b313902-ea68-4b39-87c5-0001aa4005a9\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.834987 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-config-data\") pod \"4b313902-ea68-4b39-87c5-0001aa4005a9\" (UID: \"4b313902-ea68-4b39-87c5-0001aa4005a9\") " Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.840756 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4b313902-ea68-4b39-87c5-0001aa4005a9" (UID: "4b313902-ea68-4b39-87c5-0001aa4005a9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.843654 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b313902-ea68-4b39-87c5-0001aa4005a9-kube-api-access-xtv9v" (OuterVolumeSpecName: "kube-api-access-xtv9v") pod "4b313902-ea68-4b39-87c5-0001aa4005a9" (UID: "4b313902-ea68-4b39-87c5-0001aa4005a9"). InnerVolumeSpecName "kube-api-access-xtv9v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.857623 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b313902-ea68-4b39-87c5-0001aa4005a9" (UID: "4b313902-ea68-4b39-87c5-0001aa4005a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.892608 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-config-data" (OuterVolumeSpecName: "config-data") pod "4b313902-ea68-4b39-87c5-0001aa4005a9" (UID: "4b313902-ea68-4b39-87c5-0001aa4005a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.937921 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtv9v\" (UniqueName: \"kubernetes.io/projected/4b313902-ea68-4b39-87c5-0001aa4005a9-kube-api-access-xtv9v\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.938285 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.938394 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:53 crc kubenswrapper[4813]: I0320 16:18:53.938516 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b313902-ea68-4b39-87c5-0001aa4005a9-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.282371 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" event={"ID":"4b313902-ea68-4b39-87c5-0001aa4005a9","Type":"ContainerDied","Data":"3860753e5cf1295391a277ddec283b8734219598620cebd779d9fa77f714f0e5"} Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.282449 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3860753e5cf1295391a277ddec283b8734219598620cebd779d9fa77f714f0e5" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.282858 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.689092 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:54 crc kubenswrapper[4813]: E0320 16:18:54.689748 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b313902-ea68-4b39-87c5-0001aa4005a9" containerName="watcher-kuttl-db-sync" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.689835 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b313902-ea68-4b39-87c5-0001aa4005a9" containerName="watcher-kuttl-db-sync" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.690120 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b313902-ea68-4b39-87c5-0001aa4005a9" containerName="watcher-kuttl-db-sync" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.690922 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.693817 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-bkmps" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.694045 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.699624 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.701224 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.706737 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.706960 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-internal-svc" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.711610 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-public-svc" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.719615 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.734886 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.736273 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.740187 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.750559 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.750786 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.750924 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6pxq\" (UniqueName: \"kubernetes.io/projected/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-kube-api-access-m6pxq\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751020 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751131 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751236 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751335 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751435 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") 
" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751532 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eee6f20-c009-411b-8d26-f84b7e0667cc-logs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751628 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751747 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2wnm\" (UniqueName: \"kubernetes.io/projected/1eee6f20-c009-411b-8d26-f84b7e0667cc-kube-api-access-j2wnm\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.751845 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.776569 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.787156 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853724 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853772 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853798 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eee6f20-c009-411b-8d26-f84b7e0667cc-logs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853821 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 
16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853854 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2wnm\" (UniqueName: \"kubernetes.io/projected/1eee6f20-c009-411b-8d26-f84b7e0667cc-kube-api-access-j2wnm\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853872 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853893 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853910 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv8gc\" (UniqueName: \"kubernetes.io/projected/02ee470b-e994-4934-b0e0-c5410fe0898b-kube-api-access-qv8gc\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853931 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853957 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.853987 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6pxq\" (UniqueName: \"kubernetes.io/projected/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-kube-api-access-m6pxq\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.854003 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.854031 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: 
\"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.854052 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.854066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.854082 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02ee470b-e994-4934-b0e0-c5410fe0898b-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.855001 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eee6f20-c009-411b-8d26-f84b7e0667cc-logs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.857591 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.858056 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.858235 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.858819 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.859243 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 
16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.859440 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.864565 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.864662 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.873862 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6pxq\" (UniqueName: \"kubernetes.io/projected/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-kube-api-access-m6pxq\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.875962 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.882344 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2wnm\" (UniqueName: \"kubernetes.io/projected/1eee6f20-c009-411b-8d26-f84b7e0667cc-kube-api-access-j2wnm\") pod \"watcher-kuttl-api-0\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.955062 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv8gc\" (UniqueName: \"kubernetes.io/projected/02ee470b-e994-4934-b0e0-c5410fe0898b-kube-api-access-qv8gc\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.955401 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.955573 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02ee470b-e994-4934-b0e0-c5410fe0898b-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: 
I0320 16:18:54.955717 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.956088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02ee470b-e994-4934-b0e0-c5410fe0898b-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.958647 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.959403 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:54 crc kubenswrapper[4813]: I0320 16:18:54.971171 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv8gc\" (UniqueName: \"kubernetes.io/projected/02ee470b-e994-4934-b0e0-c5410fe0898b-kube-api-access-qv8gc\") pod \"watcher-kuttl-applier-0\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:55 crc kubenswrapper[4813]: I0320 16:18:55.020135 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:18:55 crc kubenswrapper[4813]: I0320 16:18:55.027416 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:55 crc kubenswrapper[4813]: I0320 16:18:55.051824 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:18:55 crc kubenswrapper[4813]: W0320 16:18:55.473411 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1eee6f20_c009_411b_8d26_f84b7e0667cc.slice/crio-f8c05c45f996186f856a9bf4255d26762a0a62f13388b0b57428db0feda34da3 WatchSource:0}: Error finding container f8c05c45f996186f856a9bf4255d26762a0a62f13388b0b57428db0feda34da3: Status 404 returned error can't find the container with id f8c05c45f996186f856a9bf4255d26762a0a62f13388b0b57428db0feda34da3 Mar 20 16:18:55 crc kubenswrapper[4813]: I0320 16:18:55.487089 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:18:55 crc kubenswrapper[4813]: I0320 16:18:55.535266 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:18:55 crc kubenswrapper[4813]: I0320 16:18:55.618567 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:18:55 crc kubenswrapper[4813]: W0320 16:18:55.627203 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02ee470b_e994_4934_b0e0_c5410fe0898b.slice/crio-865038d60c8692831e89e96872d22454a762cffeb6db728a3f4e274105dde968 WatchSource:0}: Error finding container 865038d60c8692831e89e96872d22454a762cffeb6db728a3f4e274105dde968: Status 404 returned error can't find the container with id 865038d60c8692831e89e96872d22454a762cffeb6db728a3f4e274105dde968 Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.300838 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"02ee470b-e994-4934-b0e0-c5410fe0898b","Type":"ContainerStarted","Data":"a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.301166 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"02ee470b-e994-4934-b0e0-c5410fe0898b","Type":"ContainerStarted","Data":"865038d60c8692831e89e96872d22454a762cffeb6db728a3f4e274105dde968"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.306238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"1eee6f20-c009-411b-8d26-f84b7e0667cc","Type":"ContainerStarted","Data":"cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.306278 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"1eee6f20-c009-411b-8d26-f84b7e0667cc","Type":"ContainerStarted","Data":"93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.306288 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"1eee6f20-c009-411b-8d26-f84b7e0667cc","Type":"ContainerStarted","Data":"f8c05c45f996186f856a9bf4255d26762a0a62f13388b0b57428db0feda34da3"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.307226 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.312111 4813 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"ec987ea4-a423-44c7-bef1-8fc2e0536b1e","Type":"ContainerStarted","Data":"30bd9fd73b1c3fa76d9a32fdf55e461858abdb478020e39d66b3c690f21d083e"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.312140 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"ec987ea4-a423-44c7-bef1-8fc2e0536b1e","Type":"ContainerStarted","Data":"52c4b483d285ba1fcc002bd74c350b6df3478c0ba07e1b8aea68b11380b818c5"} Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.327189 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=2.327171839 podStartE2EDuration="2.327171839s" podCreationTimestamp="2026-03-20 16:18:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:56.321903247 +0000 UTC m=+2465.744606098" watchObservedRunningTime="2026-03-20 16:18:56.327171839 +0000 UTC m=+2465.749874680" Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.386662 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.386622823 podStartE2EDuration="2.386622823s" podCreationTimestamp="2026-03-20 16:18:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:56.355401571 +0000 UTC m=+2465.778104412" watchObservedRunningTime="2026-03-20 16:18:56.386622823 +0000 UTC m=+2465.809325664" Mar 20 16:18:56 crc kubenswrapper[4813]: I0320 16:18:56.387835 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.387826865 podStartE2EDuration="2.387826865s" podCreationTimestamp="2026-03-20 16:18:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:18:56.383867629 +0000 UTC m=+2465.806570470" watchObservedRunningTime="2026-03-20 16:18:56.387826865 +0000 UTC m=+2465.810529706" Mar 20 16:18:58 crc kubenswrapper[4813]: I0320 16:18:58.334122 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 16:18:58 crc kubenswrapper[4813]: I0320 16:18:58.534079 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:00 crc kubenswrapper[4813]: I0320 16:19:00.028593 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:00 crc kubenswrapper[4813]: I0320 16:19:00.052686 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:03 crc kubenswrapper[4813]: I0320 16:19:03.843250 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:19:03 crc kubenswrapper[4813]: I0320 16:19:03.843666 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" 
podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:19:03 crc kubenswrapper[4813]: E0320 16:19:03.926924 4813 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.181:34102->38.102.83.181:37193: write tcp 38.102.83.181:34102->38.102.83.181:37193: write: broken pipe Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.021072 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.028412 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.042928 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.049496 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.052829 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.080801 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.393583 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.405854 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.417497 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:05 crc kubenswrapper[4813]: I0320 16:19:05.419968 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:07 crc kubenswrapper[4813]: I0320 16:19:07.618999 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:07 crc kubenswrapper[4813]: I0320 16:19:07.619581 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-central-agent" containerID="cri-o://cabe83bd2fda0d47a0c1a3df7bc83891e18c670fabdd8c0d4c7d945e7dbfe9ad" gracePeriod=30 Mar 20 16:19:07 crc kubenswrapper[4813]: I0320 16:19:07.620030 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="proxy-httpd" containerID="cri-o://88e571d463971a0d10fb26b48febf47bf33e6cfbb5ad61c523a78025ff2b0d3d" gracePeriod=30 Mar 20 16:19:07 crc kubenswrapper[4813]: I0320 16:19:07.620182 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-notification-agent" 
containerID="cri-o://608df3f0bef28af1000f91c41df94de77d4e15b34ed365ec028d4c7d4490be5d" gracePeriod=30 Mar 20 16:19:07 crc kubenswrapper[4813]: I0320 16:19:07.620230 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="sg-core" containerID="cri-o://5719bc0a0e3cf1e3e1234af68379b8359e83d0cd8c27cd0d05065669ccbd01fa" gracePeriod=30 Mar 20 16:19:07 crc kubenswrapper[4813]: I0320 16:19:07.627639 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.177:3000/\": read tcp 10.217.0.2:55584->10.217.0.177:3000: read: connection reset by peer" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418500 4813 generic.go:334] "Generic (PLEG): container finished" podID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerID="88e571d463971a0d10fb26b48febf47bf33e6cfbb5ad61c523a78025ff2b0d3d" exitCode=0 Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418779 4813 generic.go:334] "Generic (PLEG): container finished" podID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerID="5719bc0a0e3cf1e3e1234af68379b8359e83d0cd8c27cd0d05065669ccbd01fa" exitCode=2 Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418787 4813 generic.go:334] "Generic (PLEG): container finished" podID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerID="608df3f0bef28af1000f91c41df94de77d4e15b34ed365ec028d4c7d4490be5d" exitCode=0 Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418793 4813 generic.go:334] "Generic (PLEG): container finished" podID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerID="cabe83bd2fda0d47a0c1a3df7bc83891e18c670fabdd8c0d4c7d945e7dbfe9ad" exitCode=0 Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418811 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerDied","Data":"88e571d463971a0d10fb26b48febf47bf33e6cfbb5ad61c523a78025ff2b0d3d"} Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418835 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerDied","Data":"5719bc0a0e3cf1e3e1234af68379b8359e83d0cd8c27cd0d05065669ccbd01fa"} Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418846 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerDied","Data":"608df3f0bef28af1000f91c41df94de77d4e15b34ed365ec028d4c7d4490be5d"} Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.418854 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerDied","Data":"cabe83bd2fda0d47a0c1a3df7bc83891e18c670fabdd8c0d4c7d945e7dbfe9ad"} Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.484525 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.571156 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.573359 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-kuttl-api-log" containerID="cri-o://93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792" gracePeriod=30 Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.573935 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-api" containerID="cri-o://cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8" gracePeriod=30 Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.625924 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-scripts\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626015 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-run-httpd\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626086 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-sg-core-conf-yaml\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626122 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-combined-ca-bundle\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626159 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-config-data\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626239 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-log-httpd\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626286 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-ceilometer-tls-certs\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.626317 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-h4b95\" (UniqueName: \"kubernetes.io/projected/4326342e-12ee-42a9-ba93-4f0e49b8b45c-kube-api-access-h4b95\") pod \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\" (UID: \"4326342e-12ee-42a9-ba93-4f0e49b8b45c\") " Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.627340 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.627360 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.632202 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-scripts" (OuterVolumeSpecName: "scripts") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.636684 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4326342e-12ee-42a9-ba93-4f0e49b8b45c-kube-api-access-h4b95" (OuterVolumeSpecName: "kube-api-access-h4b95") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "kube-api-access-h4b95". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.663499 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.687022 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.712697 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.721138 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-config-data" (OuterVolumeSpecName: "config-data") pod "4326342e-12ee-42a9-ba93-4f0e49b8b45c" (UID: "4326342e-12ee-42a9-ba93-4f0e49b8b45c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728041 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728073 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728084 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728095 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728102 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728113 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4326342e-12ee-42a9-ba93-4f0e49b8b45c-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728121 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4326342e-12ee-42a9-ba93-4f0e49b8b45c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:08 crc kubenswrapper[4813]: I0320 16:19:08.728129 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4b95\" (UniqueName: \"kubernetes.io/projected/4326342e-12ee-42a9-ba93-4f0e49b8b45c-kube-api-access-h4b95\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.429461 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4326342e-12ee-42a9-ba93-4f0e49b8b45c","Type":"ContainerDied","Data":"33fc497cb53a50e9d722dbd7b953220d3d025459668866e243ce823acddd862f"} Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.430004 4813 scope.go:117] "RemoveContainer" containerID="88e571d463971a0d10fb26b48febf47bf33e6cfbb5ad61c523a78025ff2b0d3d" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.429721 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.432708 4813 generic.go:334] "Generic (PLEG): container finished" podID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerID="93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792" exitCode=143 Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.432750 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"1eee6f20-c009-411b-8d26-f84b7e0667cc","Type":"ContainerDied","Data":"93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792"} Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.460439 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.460516 4813 scope.go:117] "RemoveContainer" containerID="5719bc0a0e3cf1e3e1234af68379b8359e83d0cd8c27cd0d05065669ccbd01fa" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.470035 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.488690 4813 scope.go:117] "RemoveContainer" containerID="608df3f0bef28af1000f91c41df94de77d4e15b34ed365ec028d4c7d4490be5d" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507166 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:09 crc kubenswrapper[4813]: E0320 16:19:09.507568 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="sg-core" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507589 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="sg-core" Mar 20 16:19:09 crc kubenswrapper[4813]: E0320 16:19:09.507607 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-central-agent" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507618 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-central-agent" Mar 20 16:19:09 crc kubenswrapper[4813]: E0320 16:19:09.507633 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="proxy-httpd" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507641 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="proxy-httpd" Mar 20 16:19:09 crc kubenswrapper[4813]: E0320 16:19:09.507654 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-notification-agent" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507661 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-notification-agent" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507869 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="sg-core" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507891 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-central-agent" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507904 
4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="proxy-httpd" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.507915 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" containerName="ceilometer-notification-agent" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.511252 4813 scope.go:117] "RemoveContainer" containerID="cabe83bd2fda0d47a0c1a3df7bc83891e18c670fabdd8c0d4c7d945e7dbfe9ad" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.517741 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.524160 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.528925 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.529021 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.529456 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652337 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-run-httpd\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652387 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652407 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qd4m\" (UniqueName: \"kubernetes.io/projected/c4207a82-76eb-404e-8e47-e5241c966fbd-kube-api-access-5qd4m\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652425 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-config-data\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652441 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-scripts\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652462 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652491 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-log-httpd\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.652621 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.753867 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-log-httpd\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.753997 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754043 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-run-httpd\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754071 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754090 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qd4m\" (UniqueName: \"kubernetes.io/projected/c4207a82-76eb-404e-8e47-e5241c966fbd-kube-api-access-5qd4m\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754110 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-config-data\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754126 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-scripts\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc 
kubenswrapper[4813]: I0320 16:19:09.754150 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754345 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-log-httpd\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.754964 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-run-httpd\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.758873 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.759877 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.760094 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-scripts\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.760173 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-config-data\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.763219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.777166 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qd4m\" (UniqueName: \"kubernetes.io/projected/c4207a82-76eb-404e-8e47-e5241c966fbd-kube-api-access-5qd4m\") pod \"ceilometer-0\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:09 crc kubenswrapper[4813]: I0320 16:19:09.849944 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.037715 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.163770 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eee6f20-c009-411b-8d26-f84b7e0667cc-logs\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.164072 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-internal-tls-certs\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.164106 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-combined-ca-bundle\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.164195 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2wnm\" (UniqueName: \"kubernetes.io/projected/1eee6f20-c009-411b-8d26-f84b7e0667cc-kube-api-access-j2wnm\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.164236 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-custom-prometheus-ca\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.164269 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-public-tls-certs\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.164289 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-config-data\") pod \"1eee6f20-c009-411b-8d26-f84b7e0667cc\" (UID: \"1eee6f20-c009-411b-8d26-f84b7e0667cc\") " Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.166838 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eee6f20-c009-411b-8d26-f84b7e0667cc-logs" (OuterVolumeSpecName: "logs") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.169195 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eee6f20-c009-411b-8d26-f84b7e0667cc-kube-api-access-j2wnm" (OuterVolumeSpecName: "kube-api-access-j2wnm") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "kube-api-access-j2wnm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.185463 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.189262 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.206839 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.210738 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.212634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-config-data" (OuterVolumeSpecName: "config-data") pod "1eee6f20-c009-411b-8d26-f84b7e0667cc" (UID: "1eee6f20-c009-411b-8d26-f84b7e0667cc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266223 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266247 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266256 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266266 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1eee6f20-c009-411b-8d26-f84b7e0667cc-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266274 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266282 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eee6f20-c009-411b-8d26-f84b7e0667cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.266291 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2wnm\" (UniqueName: \"kubernetes.io/projected/1eee6f20-c009-411b-8d26-f84b7e0667cc-kube-api-access-j2wnm\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.343193 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:10 crc kubenswrapper[4813]: W0320 16:19:10.346555 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4207a82_76eb_404e_8e47_e5241c966fbd.slice/crio-0d50697a7ab8d83479879fe9728bf10380a840aff0978e4d171052149a46a382 WatchSource:0}: Error finding container 0d50697a7ab8d83479879fe9728bf10380a840aff0978e4d171052149a46a382: Status 404 returned error can't find the container with id 0d50697a7ab8d83479879fe9728bf10380a840aff0978e4d171052149a46a382 Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.443674 4813 generic.go:334] "Generic (PLEG): container finished" podID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerID="cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8" exitCode=0 Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.443760 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"1eee6f20-c009-411b-8d26-f84b7e0667cc","Type":"ContainerDied","Data":"cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8"} Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.443796 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"1eee6f20-c009-411b-8d26-f84b7e0667cc","Type":"ContainerDied","Data":"f8c05c45f996186f856a9bf4255d26762a0a62f13388b0b57428db0feda34da3"} Mar 20 16:19:10 crc 
kubenswrapper[4813]: I0320 16:19:10.443821 4813 scope.go:117] "RemoveContainer" containerID="cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.443939 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.447236 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerStarted","Data":"0d50697a7ab8d83479879fe9728bf10380a840aff0978e4d171052149a46a382"} Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.476049 4813 scope.go:117] "RemoveContainer" containerID="93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.487455 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.503719 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.505147 4813 scope.go:117] "RemoveContainer" containerID="cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8" Mar 20 16:19:10 crc kubenswrapper[4813]: E0320 16:19:10.505690 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8\": container with ID starting with cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8 not found: ID does not exist" containerID="cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.505733 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8"} err="failed to get container status \"cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8\": rpc error: code = NotFound desc = could not find container \"cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8\": container with ID starting with cb36b05853358f1aafa1a5cc1232d9242cd4dc2511388acef07d857a566b0db8 not found: ID does not exist" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.505765 4813 scope.go:117] "RemoveContainer" containerID="93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792" Mar 20 16:19:10 crc kubenswrapper[4813]: E0320 16:19:10.506265 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792\": container with ID starting with 93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792 not found: ID does not exist" containerID="93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.506302 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792"} err="failed to get container status \"93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792\": rpc error: code = NotFound desc = could not find container \"93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792\": container with ID starting with 
93544a7a1204ead38cf9384687c0fe367093d36a905ed71aa8199a33765cf792 not found: ID does not exist" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.508775 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:10 crc kubenswrapper[4813]: E0320 16:19:10.509158 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-kuttl-api-log" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.509182 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-kuttl-api-log" Mar 20 16:19:10 crc kubenswrapper[4813]: E0320 16:19:10.509207 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-api" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.509216 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-api" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.509521 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-api" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.509545 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-kuttl-api-log" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.510655 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.513534 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.513576 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-public-svc" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.513740 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-internal-svc" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.523429 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673127 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673203 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673225 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80d44ced-5f6d-4604-836b-2e58b062b136-logs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 
20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673256 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzjjh\" (UniqueName: \"kubernetes.io/projected/80d44ced-5f6d-4604-836b-2e58b062b136-kube-api-access-pzjjh\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673278 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673315 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.673356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.774934 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.774980 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.775031 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.775050 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80d44ced-5f6d-4604-836b-2e58b062b136-logs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.775078 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzjjh\" (UniqueName: \"kubernetes.io/projected/80d44ced-5f6d-4604-836b-2e58b062b136-kube-api-access-pzjjh\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 
16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.775096 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.775133 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.776209 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80d44ced-5f6d-4604-836b-2e58b062b136-logs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.778942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.778942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.778969 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.779601 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.779928 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.798018 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzjjh\" (UniqueName: \"kubernetes.io/projected/80d44ced-5f6d-4604-836b-2e58b062b136-kube-api-access-pzjjh\") pod \"watcher-kuttl-api-0\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:10 crc kubenswrapper[4813]: I0320 16:19:10.844857 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:11 crc kubenswrapper[4813]: I0320 16:19:11.263191 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:11 crc kubenswrapper[4813]: I0320 16:19:11.327302 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" path="/var/lib/kubelet/pods/1eee6f20-c009-411b-8d26-f84b7e0667cc/volumes" Mar 20 16:19:11 crc kubenswrapper[4813]: I0320 16:19:11.328165 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4326342e-12ee-42a9-ba93-4f0e49b8b45c" path="/var/lib/kubelet/pods/4326342e-12ee-42a9-ba93-4f0e49b8b45c/volumes" Mar 20 16:19:11 crc kubenswrapper[4813]: I0320 16:19:11.459440 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"80d44ced-5f6d-4604-836b-2e58b062b136","Type":"ContainerStarted","Data":"af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50"} Mar 20 16:19:11 crc kubenswrapper[4813]: I0320 16:19:11.459767 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"80d44ced-5f6d-4604-836b-2e58b062b136","Type":"ContainerStarted","Data":"c5ba3c81a8f379d2e9e217a107097cb2a81bbc4789b96cff0289e5fe35b8f7d8"} Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.483257 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"80d44ced-5f6d-4604-836b-2e58b062b136","Type":"ContainerStarted","Data":"6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6"} Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.483695 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.486769 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerStarted","Data":"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271"} Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.516911 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.5168929 podStartE2EDuration="2.5168929s" podCreationTimestamp="2026-03-20 16:19:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:19:12.51208497 +0000 UTC m=+2481.934787811" watchObservedRunningTime="2026-03-20 16:19:12.5168929 +0000 UTC m=+2481.939595741" Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.935665 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f"] Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.941816 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-2fr4f"] Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.995894 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watchere8de-account-delete-4nzjk"] Mar 20 16:19:12 crc kubenswrapper[4813]: I0320 16:19:12.997158 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.022276 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watchere8de-account-delete-4nzjk"] Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.095235 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.095437 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="ec987ea4-a423-44c7-bef1-8fc2e0536b1e" containerName="watcher-decision-engine" containerID="cri-o://30bd9fd73b1c3fa76d9a32fdf55e461858abdb478020e39d66b3c690f21d083e" gracePeriod=30 Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.110892 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.111144 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="02ee470b-e994-4934-b0e0-c5410fe0898b" containerName="watcher-applier" containerID="cri-o://a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" gracePeriod=30 Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.123572 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm8xq\" (UniqueName: \"kubernetes.io/projected/fe5c76dc-10d0-4952-a661-427592466850-kube-api-access-zm8xq\") pod \"watchere8de-account-delete-4nzjk\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.123672 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5c76dc-10d0-4952-a661-427592466850-operator-scripts\") pod \"watchere8de-account-delete-4nzjk\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.176112 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.237360 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5c76dc-10d0-4952-a661-427592466850-operator-scripts\") pod \"watchere8de-account-delete-4nzjk\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.238039 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm8xq\" (UniqueName: \"kubernetes.io/projected/fe5c76dc-10d0-4952-a661-427592466850-kube-api-access-zm8xq\") pod \"watchere8de-account-delete-4nzjk\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.238287 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5c76dc-10d0-4952-a661-427592466850-operator-scripts\") pod \"watchere8de-account-delete-4nzjk\" (UID: 
\"fe5c76dc-10d0-4952-a661-427592466850\") " pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.261710 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm8xq\" (UniqueName: \"kubernetes.io/projected/fe5c76dc-10d0-4952-a661-427592466850-kube-api-access-zm8xq\") pod \"watchere8de-account-delete-4nzjk\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.293706 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b313902-ea68-4b39-87c5-0001aa4005a9" path="/var/lib/kubelet/pods/4b313902-ea68-4b39-87c5-0001aa4005a9/volumes" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.327531 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.542247 4813 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="watcher-kuttl-default/watcher-kuttl-api-0" secret="" err="secret \"watcher-watcher-kuttl-dockercfg-bkmps\" not found" Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.542279 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerStarted","Data":"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97"} Mar 20 16:19:13 crc kubenswrapper[4813]: E0320 16:19:13.645693 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-api-config-data: secret "watcher-kuttl-api-config-data" not found Mar 20 16:19:13 crc kubenswrapper[4813]: E0320 16:19:13.645748 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data podName:80d44ced-5f6d-4604-836b-2e58b062b136 nodeName:}" failed. No retries permitted until 2026-03-20 16:19:14.145734032 +0000 UTC m=+2483.568436873 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data") pod "watcher-kuttl-api-0" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136") : secret "watcher-kuttl-api-config-data" not found Mar 20 16:19:13 crc kubenswrapper[4813]: I0320 16:19:13.947538 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watchere8de-account-delete-4nzjk"] Mar 20 16:19:13 crc kubenswrapper[4813]: W0320 16:19:13.952528 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe5c76dc_10d0_4952_a661_427592466850.slice/crio-759da470af73ceba3b3a436cfc5c9250f33bfa7298ca8b9b4e12cd4ba6de8f50 WatchSource:0}: Error finding container 759da470af73ceba3b3a436cfc5c9250f33bfa7298ca8b9b4e12cd4ba6de8f50: Status 404 returned error can't find the container with id 759da470af73ceba3b3a436cfc5c9250f33bfa7298ca8b9b4e12cd4ba6de8f50 Mar 20 16:19:14 crc kubenswrapper[4813]: E0320 16:19:14.152649 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-api-config-data: secret "watcher-kuttl-api-config-data" not found Mar 20 16:19:14 crc kubenswrapper[4813]: E0320 16:19:14.152716 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data podName:80d44ced-5f6d-4604-836b-2e58b062b136 nodeName:}" failed. No retries permitted until 2026-03-20 16:19:15.152701828 +0000 UTC m=+2484.575404669 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data") pod "watcher-kuttl-api-0" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136") : secret "watcher-kuttl-api-config-data" not found Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.553554 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerStarted","Data":"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6"} Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.555301 4813 generic.go:334] "Generic (PLEG): container finished" podID="fe5c76dc-10d0-4952-a661-427592466850" containerID="75a20fd563dd85f81f8f5205742ce1365ea95c339ca9d56a571aee10f13abd9a" exitCode=0 Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.555403 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" event={"ID":"fe5c76dc-10d0-4952-a661-427592466850","Type":"ContainerDied","Data":"75a20fd563dd85f81f8f5205742ce1365ea95c339ca9d56a571aee10f13abd9a"} Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.555654 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" event={"ID":"fe5c76dc-10d0-4952-a661-427592466850","Type":"ContainerStarted","Data":"759da470af73ceba3b3a436cfc5c9250f33bfa7298ca8b9b4e12cd4ba6de8f50"} Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.555667 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.555802 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-kuttl-api-log" 
containerID="cri-o://af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50" gracePeriod=30 Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.555827 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" containerID="cri-o://6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6" gracePeriod=30 Mar 20 16:19:14 crc kubenswrapper[4813]: I0320 16:19:14.580957 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.0.183:9322/\": EOF" Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.028929 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-kuttl-api-log" probeResult="failure" output="Get \"https://10.217.0.180:9322/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.028971 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="1eee6f20-c009-411b-8d26-f84b7e0667cc" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.0.180:9322/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 16:19:15 crc kubenswrapper[4813]: E0320 16:19:15.054243 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:19:15 crc kubenswrapper[4813]: E0320 16:19:15.055493 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:19:15 crc kubenswrapper[4813]: E0320 16:19:15.056535 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:19:15 crc kubenswrapper[4813]: E0320 16:19:15.056575 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="02ee470b-e994-4934-b0e0-c5410fe0898b" containerName="watcher-applier" Mar 20 16:19:15 crc kubenswrapper[4813]: E0320 16:19:15.174281 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-api-config-data: secret "watcher-kuttl-api-config-data" not found Mar 20 16:19:15 crc kubenswrapper[4813]: E0320 16:19:15.174378 4813 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data podName:80d44ced-5f6d-4604-836b-2e58b062b136 nodeName:}" failed. No retries permitted until 2026-03-20 16:19:17.174355548 +0000 UTC m=+2486.597058389 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data") pod "watcher-kuttl-api-0" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136") : secret "watcher-kuttl-api-config-data" not found Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.565724 4813 generic.go:334] "Generic (PLEG): container finished" podID="80d44ced-5f6d-4604-836b-2e58b062b136" containerID="af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50" exitCode=143 Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.565789 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"80d44ced-5f6d-4604-836b-2e58b062b136","Type":"ContainerDied","Data":"af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50"} Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.815032 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.845919 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:15 crc kubenswrapper[4813]: I0320 16:19:15.960868 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.098929 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm8xq\" (UniqueName: \"kubernetes.io/projected/fe5c76dc-10d0-4952-a661-427592466850-kube-api-access-zm8xq\") pod \"fe5c76dc-10d0-4952-a661-427592466850\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.099088 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5c76dc-10d0-4952-a661-427592466850-operator-scripts\") pod \"fe5c76dc-10d0-4952-a661-427592466850\" (UID: \"fe5c76dc-10d0-4952-a661-427592466850\") " Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.099787 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe5c76dc-10d0-4952-a661-427592466850-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe5c76dc-10d0-4952-a661-427592466850" (UID: "fe5c76dc-10d0-4952-a661-427592466850"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.108727 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe5c76dc-10d0-4952-a661-427592466850-kube-api-access-zm8xq" (OuterVolumeSpecName: "kube-api-access-zm8xq") pod "fe5c76dc-10d0-4952-a661-427592466850" (UID: "fe5c76dc-10d0-4952-a661-427592466850"). InnerVolumeSpecName "kube-api-access-zm8xq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.200365 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm8xq\" (UniqueName: \"kubernetes.io/projected/fe5c76dc-10d0-4952-a661-427592466850-kube-api-access-zm8xq\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.200397 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5c76dc-10d0-4952-a661-427592466850-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.575910 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" event={"ID":"fe5c76dc-10d0-4952-a661-427592466850","Type":"ContainerDied","Data":"759da470af73ceba3b3a436cfc5c9250f33bfa7298ca8b9b4e12cd4ba6de8f50"} Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.575957 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="759da470af73ceba3b3a436cfc5c9250f33bfa7298ca8b9b4e12cd4ba6de8f50" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.575958 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchere8de-account-delete-4nzjk" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.685437 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.0.183:9322/\": read tcp 10.217.0.2:59820->10.217.0.183:9322: read: connection reset by peer" Mar 20 16:19:16 crc kubenswrapper[4813]: I0320 16:19:16.685955 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.0.183:9322/\": dial tcp 10.217.0.183:9322: connect: connection refused" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.139190 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220539 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-public-tls-certs\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220654 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-custom-prometheus-ca\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220687 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-combined-ca-bundle\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220738 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80d44ced-5f6d-4604-836b-2e58b062b136-logs\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220777 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220820 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-internal-tls-certs\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.220860 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzjjh\" (UniqueName: \"kubernetes.io/projected/80d44ced-5f6d-4604-836b-2e58b062b136-kube-api-access-pzjjh\") pod \"80d44ced-5f6d-4604-836b-2e58b062b136\" (UID: \"80d44ced-5f6d-4604-836b-2e58b062b136\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.227799 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80d44ced-5f6d-4604-836b-2e58b062b136-kube-api-access-pzjjh" (OuterVolumeSpecName: "kube-api-access-pzjjh") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "kube-api-access-pzjjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.233925 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80d44ced-5f6d-4604-836b-2e58b062b136-logs" (OuterVolumeSpecName: "logs") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.276222 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.281775 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.292376 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.304541 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.313428 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data" (OuterVolumeSpecName: "config-data") pod "80d44ced-5f6d-4604-836b-2e58b062b136" (UID: "80d44ced-5f6d-4604-836b-2e58b062b136"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.322944 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.322974 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.322986 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80d44ced-5f6d-4604-836b-2e58b062b136-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.322995 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.323003 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.323012 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzjjh\" (UniqueName: \"kubernetes.io/projected/80d44ced-5f6d-4604-836b-2e58b062b136-kube-api-access-pzjjh\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.323023 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80d44ced-5f6d-4604-836b-2e58b062b136-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.367823 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.525742 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv8gc\" (UniqueName: \"kubernetes.io/projected/02ee470b-e994-4934-b0e0-c5410fe0898b-kube-api-access-qv8gc\") pod \"02ee470b-e994-4934-b0e0-c5410fe0898b\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.526056 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-config-data\") pod \"02ee470b-e994-4934-b0e0-c5410fe0898b\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.526077 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-combined-ca-bundle\") pod \"02ee470b-e994-4934-b0e0-c5410fe0898b\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.526216 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02ee470b-e994-4934-b0e0-c5410fe0898b-logs\") pod \"02ee470b-e994-4934-b0e0-c5410fe0898b\" (UID: \"02ee470b-e994-4934-b0e0-c5410fe0898b\") " Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.526916 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02ee470b-e994-4934-b0e0-c5410fe0898b-logs" (OuterVolumeSpecName: "logs") pod "02ee470b-e994-4934-b0e0-c5410fe0898b" (UID: "02ee470b-e994-4934-b0e0-c5410fe0898b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.549533 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02ee470b-e994-4934-b0e0-c5410fe0898b" (UID: "02ee470b-e994-4934-b0e0-c5410fe0898b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.561371 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02ee470b-e994-4934-b0e0-c5410fe0898b-kube-api-access-qv8gc" (OuterVolumeSpecName: "kube-api-access-qv8gc") pod "02ee470b-e994-4934-b0e0-c5410fe0898b" (UID: "02ee470b-e994-4934-b0e0-c5410fe0898b"). InnerVolumeSpecName "kube-api-access-qv8gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.568398 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-config-data" (OuterVolumeSpecName: "config-data") pod "02ee470b-e994-4934-b0e0-c5410fe0898b" (UID: "02ee470b-e994-4934-b0e0-c5410fe0898b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.586034 4813 generic.go:334] "Generic (PLEG): container finished" podID="80d44ced-5f6d-4604-836b-2e58b062b136" containerID="6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6" exitCode=0 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.586102 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"80d44ced-5f6d-4604-836b-2e58b062b136","Type":"ContainerDied","Data":"6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6"} Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.586133 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"80d44ced-5f6d-4604-836b-2e58b062b136","Type":"ContainerDied","Data":"c5ba3c81a8f379d2e9e217a107097cb2a81bbc4789b96cff0289e5fe35b8f7d8"} Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.586151 4813 scope.go:117] "RemoveContainer" containerID="6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.586305 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.596692 4813 generic.go:334] "Generic (PLEG): container finished" podID="ec987ea4-a423-44c7-bef1-8fc2e0536b1e" containerID="30bd9fd73b1c3fa76d9a32fdf55e461858abdb478020e39d66b3c690f21d083e" exitCode=0 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.596746 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"ec987ea4-a423-44c7-bef1-8fc2e0536b1e","Type":"ContainerDied","Data":"30bd9fd73b1c3fa76d9a32fdf55e461858abdb478020e39d66b3c690f21d083e"} Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.599911 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerStarted","Data":"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e"} Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.600125 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-central-agent" containerID="cri-o://23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" gracePeriod=30 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.600149 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="sg-core" containerID="cri-o://94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" gracePeriod=30 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.600178 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-notification-agent" containerID="cri-o://1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" gracePeriod=30 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.600187 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="proxy-httpd" 
containerID="cri-o://b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" gracePeriod=30 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.600157 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.604636 4813 generic.go:334] "Generic (PLEG): container finished" podID="02ee470b-e994-4934-b0e0-c5410fe0898b" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" exitCode=0 Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.604679 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"02ee470b-e994-4934-b0e0-c5410fe0898b","Type":"ContainerDied","Data":"a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98"} Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.604706 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"02ee470b-e994-4934-b0e0-c5410fe0898b","Type":"ContainerDied","Data":"865038d60c8692831e89e96872d22454a762cffeb6db728a3f4e274105dde968"} Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.604761 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.626829 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.628900 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv8gc\" (UniqueName: \"kubernetes.io/projected/02ee470b-e994-4934-b0e0-c5410fe0898b-kube-api-access-qv8gc\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.628928 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.628939 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02ee470b-e994-4934-b0e0-c5410fe0898b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.628949 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02ee470b-e994-4934-b0e0-c5410fe0898b-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.641108 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.649688 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.671975872 podStartE2EDuration="8.649664611s" podCreationTimestamp="2026-03-20 16:19:09 +0000 UTC" firstStartedPulling="2026-03-20 16:19:10.348934508 +0000 UTC m=+2479.771637349" lastFinishedPulling="2026-03-20 16:19:17.326623257 +0000 UTC m=+2486.749326088" observedRunningTime="2026-03-20 16:19:17.639052495 +0000 UTC m=+2487.061755336" watchObservedRunningTime="2026-03-20 16:19:17.649664611 +0000 UTC m=+2487.072367442" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.652842 4813 scope.go:117] "RemoveContainer" 
containerID="af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.675929 4813 scope.go:117] "RemoveContainer" containerID="6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6" Mar 20 16:19:17 crc kubenswrapper[4813]: E0320 16:19:17.679906 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6\": container with ID starting with 6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6 not found: ID does not exist" containerID="6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.679945 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6"} err="failed to get container status \"6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6\": rpc error: code = NotFound desc = could not find container \"6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6\": container with ID starting with 6040e5c8a4da47d273e66fed41b111d11a77d33dba5d7df89ede09f589f2cfe6 not found: ID does not exist" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.679967 4813 scope.go:117] "RemoveContainer" containerID="af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50" Mar 20 16:19:17 crc kubenswrapper[4813]: E0320 16:19:17.682430 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50\": container with ID starting with af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50 not found: ID does not exist" containerID="af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.682489 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50"} err="failed to get container status \"af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50\": rpc error: code = NotFound desc = could not find container \"af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50\": container with ID starting with af2b6ab74705fe68c6945488b513d5fb3f490b03957dd666826d036ba7594c50 not found: ID does not exist" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.682515 4813 scope.go:117] "RemoveContainer" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.683369 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.689312 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.702683 4813 scope.go:117] "RemoveContainer" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" Mar 20 16:19:17 crc kubenswrapper[4813]: E0320 16:19:17.708931 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98\": container with ID starting with 
a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98 not found: ID does not exist" containerID="a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.708991 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98"} err="failed to get container status \"a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98\": rpc error: code = NotFound desc = could not find container \"a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98\": container with ID starting with a4e59bd8d00af041f5adb8533eebc20bd2c9e96a21c369b127d807c742da8a98 not found: ID does not exist" Mar 20 16:19:17 crc kubenswrapper[4813]: I0320 16:19:17.881136 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.021651 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-9kz7w"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.035947 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-custom-prometheus-ca\") pod \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.036087 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-logs\") pod \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.036227 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-config-data\") pod \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.036316 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-combined-ca-bundle\") pod \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.036344 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6pxq\" (UniqueName: \"kubernetes.io/projected/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-kube-api-access-m6pxq\") pod \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\" (UID: \"ec987ea4-a423-44c7-bef1-8fc2e0536b1e\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.037354 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-logs" (OuterVolumeSpecName: "logs") pod "ec987ea4-a423-44c7-bef1-8fc2e0536b1e" (UID: "ec987ea4-a423-44c7-bef1-8fc2e0536b1e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.036227 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-9kz7w"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.047834 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-kube-api-access-m6pxq" (OuterVolumeSpecName: "kube-api-access-m6pxq") pod "ec987ea4-a423-44c7-bef1-8fc2e0536b1e" (UID: "ec987ea4-a423-44c7-bef1-8fc2e0536b1e"). InnerVolumeSpecName "kube-api-access-m6pxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.090760 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-e8de-account-create-update-zzf68"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.098055 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec987ea4-a423-44c7-bef1-8fc2e0536b1e" (UID: "ec987ea4-a423-44c7-bef1-8fc2e0536b1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.098697 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watchere8de-account-delete-4nzjk"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.109143 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "ec987ea4-a423-44c7-bef1-8fc2e0536b1e" (UID: "ec987ea4-a423-44c7-bef1-8fc2e0536b1e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.109356 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watchere8de-account-delete-4nzjk"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.134333 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-e8de-account-create-update-zzf68"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.135824 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-config-data" (OuterVolumeSpecName: "config-data") pod "ec987ea4-a423-44c7-bef1-8fc2e0536b1e" (UID: "ec987ea4-a423-44c7-bef1-8fc2e0536b1e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.144155 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.144184 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.144194 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.144201 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.144210 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6pxq\" (UniqueName: \"kubernetes.io/projected/ec987ea4-a423-44c7-bef1-8fc2e0536b1e-kube-api-access-m6pxq\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.573631 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.614860 4813 generic.go:334] "Generic (PLEG): container finished" podID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" exitCode=0 Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.614889 4813 generic.go:334] "Generic (PLEG): container finished" podID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" exitCode=2 Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.614897 4813 generic.go:334] "Generic (PLEG): container finished" podID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" exitCode=0 Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.614904 4813 generic.go:334] "Generic (PLEG): container finished" podID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" exitCode=0 Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.614950 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.614959 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerDied","Data":"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e"} Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.615006 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerDied","Data":"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6"} Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.615018 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerDied","Data":"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97"} Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.615028 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerDied","Data":"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271"} Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.615038 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c4207a82-76eb-404e-8e47-e5241c966fbd","Type":"ContainerDied","Data":"0d50697a7ab8d83479879fe9728bf10380a840aff0978e4d171052149a46a382"} Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.615061 4813 scope.go:117] "RemoveContainer" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.616843 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"ec987ea4-a423-44c7-bef1-8fc2e0536b1e","Type":"ContainerDied","Data":"52c4b483d285ba1fcc002bd74c350b6df3478c0ba07e1b8aea68b11380b818c5"} Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.616865 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.638601 4813 scope.go:117] "RemoveContainer" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652348 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-config-data\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652500 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-scripts\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652559 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-run-httpd\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652579 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-sg-core-conf-yaml\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qd4m\" (UniqueName: \"kubernetes.io/projected/c4207a82-76eb-404e-8e47-e5241c966fbd-kube-api-access-5qd4m\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652647 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-ceilometer-tls-certs\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652688 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-log-httpd\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652708 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-combined-ca-bundle\") pod \"c4207a82-76eb-404e-8e47-e5241c966fbd\" (UID: \"c4207a82-76eb-404e-8e47-e5241c966fbd\") " Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.652973 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.658340 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.660435 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4207a82-76eb-404e-8e47-e5241c966fbd-kube-api-access-5qd4m" (OuterVolumeSpecName: "kube-api-access-5qd4m") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "kube-api-access-5qd4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.674742 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.678059 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-scripts" (OuterVolumeSpecName: "scripts") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.678095 4813 scope.go:117] "RemoveContainer" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.682701 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.684172 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.701320 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.701417 4813 scope.go:117] "RemoveContainer" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.719759 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.720047 4813 scope.go:117] "RemoveContainer" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.720460 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": container with ID starting with b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e not found: ID does not exist" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.720511 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e"} err="failed to get container status \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": rpc error: code = NotFound desc = could not find container \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": container with ID starting with b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.720535 4813 scope.go:117] "RemoveContainer" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.720847 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": container with ID starting with 94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6 not found: ID does not exist" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.720874 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6"} err="failed to get container status \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": rpc error: code = NotFound desc = could not find container \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": container with ID starting with 94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.720892 4813 scope.go:117] "RemoveContainer" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.721172 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": container with ID starting with 1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97 not found: ID does not exist" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.721201 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97"} err="failed to get container status \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": rpc error: code = NotFound desc = could not 
find container \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": container with ID starting with 1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.721255 4813 scope.go:117] "RemoveContainer" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.721681 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": container with ID starting with 23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271 not found: ID does not exist" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.721738 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271"} err="failed to get container status \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": rpc error: code = NotFound desc = could not find container \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": container with ID starting with 23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.721772 4813 scope.go:117] "RemoveContainer" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722103 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e"} err="failed to get container status \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": rpc error: code = NotFound desc = could not find container \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": container with ID starting with b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722131 4813 scope.go:117] "RemoveContainer" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722349 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6"} err="failed to get container status \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": rpc error: code = NotFound desc = could not find container \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": container with ID starting with 94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722375 4813 scope.go:117] "RemoveContainer" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722603 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97"} err="failed to get container status \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": rpc error: code = NotFound desc = could not 
find container \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": container with ID starting with 1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722633 4813 scope.go:117] "RemoveContainer" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722873 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271"} err="failed to get container status \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": rpc error: code = NotFound desc = could not find container \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": container with ID starting with 23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.722921 4813 scope.go:117] "RemoveContainer" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.723268 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e"} err="failed to get container status \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": rpc error: code = NotFound desc = could not find container \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": container with ID starting with b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.723319 4813 scope.go:117] "RemoveContainer" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.723569 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6"} err="failed to get container status \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": rpc error: code = NotFound desc = could not find container \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": container with ID starting with 94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.723597 4813 scope.go:117] "RemoveContainer" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.724202 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97"} err="failed to get container status \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": rpc error: code = NotFound desc = could not find container \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": container with ID starting with 1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.724231 4813 scope.go:117] "RemoveContainer" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.724594 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271"} err="failed to get container status \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": rpc error: code = NotFound desc = could not find container \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": container with ID starting with 23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.724619 4813 scope.go:117] "RemoveContainer" containerID="b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.724880 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e"} err="failed to get container status \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": rpc error: code = NotFound desc = could not find container \"b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e\": container with ID starting with b90fce493311eb7adbdebb1632d5994954f705c3940c9d28c46b2bf250a2660e not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.724903 4813 scope.go:117] "RemoveContainer" containerID="94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.725104 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6"} err="failed to get container status \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": rpc error: code = NotFound desc = could not find container \"94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6\": container with ID starting with 94df8cffb4fa77172bfa2b1fc788c90bf8fb9e0ec8c57203e7cb62294a580ad6 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.725124 4813 scope.go:117] "RemoveContainer" containerID="1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.725319 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97"} err="failed to get container status \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": rpc error: code = NotFound desc = could not find container \"1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97\": container with ID starting with 1d9e06ea1ad3cc38668b1192185a2872d08861e864e3c501c667831a417a0b97 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.725369 4813 scope.go:117] "RemoveContainer" containerID="23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.725621 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271"} err="failed to get container status \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": rpc error: code = NotFound desc = could not find container \"23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271\": container with ID starting with 
23308ae839d138a8fa870ab7e44a034c61d20711ab0f9f550d794c56a01bc271 not found: ID does not exist" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.725666 4813 scope.go:117] "RemoveContainer" containerID="30bd9fd73b1c3fa76d9a32fdf55e461858abdb478020e39d66b3c690f21d083e" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.740458 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-config-data" (OuterVolumeSpecName: "config-data") pod "c4207a82-76eb-404e-8e47-e5241c966fbd" (UID: "c4207a82-76eb-404e-8e47-e5241c966fbd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754837 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754869 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754878 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754886 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754898 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qd4m\" (UniqueName: \"kubernetes.io/projected/c4207a82-76eb-404e-8e47-e5241c966fbd-kube-api-access-5qd4m\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754908 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754915 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4207a82-76eb-404e-8e47-e5241c966fbd-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.754924 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4207a82-76eb-404e-8e47-e5241c966fbd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.949786 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.958849 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986210 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986593 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-central-agent" Mar 20 16:19:18 crc 
kubenswrapper[4813]: I0320 16:19:18.986612 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-central-agent" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986627 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-notification-agent" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986637 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-notification-agent" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986646 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-kuttl-api-log" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986652 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-kuttl-api-log" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986665 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="sg-core" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986671 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="sg-core" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986682 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5c76dc-10d0-4952-a661-427592466850" containerName="mariadb-account-delete" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986688 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5c76dc-10d0-4952-a661-427592466850" containerName="mariadb-account-delete" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986701 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986707 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986715 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="proxy-httpd" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986721 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="proxy-httpd" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986732 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec987ea4-a423-44c7-bef1-8fc2e0536b1e" containerName="watcher-decision-engine" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986739 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec987ea4-a423-44c7-bef1-8fc2e0536b1e" containerName="watcher-decision-engine" Mar 20 16:19:18 crc kubenswrapper[4813]: E0320 16:19:18.986752 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02ee470b-e994-4934-b0e0-c5410fe0898b" containerName="watcher-applier" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986758 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="02ee470b-e994-4934-b0e0-c5410fe0898b" containerName="watcher-applier" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986893 4813 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="02ee470b-e994-4934-b0e0-c5410fe0898b" containerName="watcher-applier" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986905 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe5c76dc-10d0-4952-a661-427592466850" containerName="mariadb-account-delete" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986916 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-kuttl-api-log" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986923 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-notification-agent" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986931 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec987ea4-a423-44c7-bef1-8fc2e0536b1e" containerName="watcher-decision-engine" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986938 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="sg-core" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986948 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="proxy-httpd" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986957 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" containerName="watcher-api" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.986964 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" containerName="ceilometer-central-agent" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.988231 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.990302 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.990888 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.990899 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:19:18 crc kubenswrapper[4813]: I0320 16:19:18.999142 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060418 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96gwd\" (UniqueName: \"kubernetes.io/projected/08549fab-16ed-4e10-8553-077405f1eceb-kube-api-access-96gwd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060471 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060536 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-run-httpd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060565 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-log-httpd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060633 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-config-data\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060657 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060685 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.060728 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-scripts\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162130 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-scripts\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162218 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96gwd\" (UniqueName: \"kubernetes.io/projected/08549fab-16ed-4e10-8553-077405f1eceb-kube-api-access-96gwd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162246 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162298 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-run-httpd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162329 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-log-httpd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162392 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-config-data\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162417 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.162447 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.163839 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-log-httpd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " 
pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.163900 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-run-httpd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.167721 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.167764 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-config-data\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.171341 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.174910 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.175853 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-scripts\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.179855 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96gwd\" (UniqueName: \"kubernetes.io/projected/08549fab-16ed-4e10-8553-077405f1eceb-kube-api-access-96gwd\") pod \"ceilometer-0\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.273821 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02ee470b-e994-4934-b0e0-c5410fe0898b" path="/var/lib/kubelet/pods/02ee470b-e994-4934-b0e0-c5410fe0898b/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.274301 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b0891f1-e2fe-40d3-bec6-5306428b0086" path="/var/lib/kubelet/pods/3b0891f1-e2fe-40d3-bec6-5306428b0086/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.274882 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80d44ced-5f6d-4604-836b-2e58b062b136" path="/var/lib/kubelet/pods/80d44ced-5f6d-4604-836b-2e58b062b136/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.275881 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4207a82-76eb-404e-8e47-e5241c966fbd" 
path="/var/lib/kubelet/pods/c4207a82-76eb-404e-8e47-e5241c966fbd/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.276532 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7e7f714-64b2-4d9c-856d-aaefe3b118cc" path="/var/lib/kubelet/pods/c7e7f714-64b2-4d9c-856d-aaefe3b118cc/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.277028 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec987ea4-a423-44c7-bef1-8fc2e0536b1e" path="/var/lib/kubelet/pods/ec987ea4-a423-44c7-bef1-8fc2e0536b1e/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.277990 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe5c76dc-10d0-4952-a661-427592466850" path="/var/lib/kubelet/pods/fe5c76dc-10d0-4952-a661-427592466850/volumes" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.314860 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:19 crc kubenswrapper[4813]: I0320 16:19:19.779246 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.044175 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-lk472"] Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.045326 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.056467 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-92df-account-create-update-7tw4r"] Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.057460 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.058937 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.065200 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-lk472"] Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.082060 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-92df-account-create-update-7tw4r"] Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.179619 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pndcb\" (UniqueName: \"kubernetes.io/projected/19c8777f-603b-48df-b375-555b07680c55-kube-api-access-pndcb\") pod \"watcher-92df-account-create-update-7tw4r\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.179711 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nlqs\" (UniqueName: \"kubernetes.io/projected/230e4fdf-16a8-426a-8b94-906c516a84b5-kube-api-access-5nlqs\") pod \"watcher-db-create-lk472\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.179783 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19c8777f-603b-48df-b375-555b07680c55-operator-scripts\") pod \"watcher-92df-account-create-update-7tw4r\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.179799 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/230e4fdf-16a8-426a-8b94-906c516a84b5-operator-scripts\") pod \"watcher-db-create-lk472\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.282351 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pndcb\" (UniqueName: \"kubernetes.io/projected/19c8777f-603b-48df-b375-555b07680c55-kube-api-access-pndcb\") pod \"watcher-92df-account-create-update-7tw4r\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.282429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nlqs\" (UniqueName: \"kubernetes.io/projected/230e4fdf-16a8-426a-8b94-906c516a84b5-kube-api-access-5nlqs\") pod \"watcher-db-create-lk472\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.282534 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/230e4fdf-16a8-426a-8b94-906c516a84b5-operator-scripts\") pod \"watcher-db-create-lk472\" (UID: 
\"230e4fdf-16a8-426a-8b94-906c516a84b5\") " pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.282550 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19c8777f-603b-48df-b375-555b07680c55-operator-scripts\") pod \"watcher-92df-account-create-update-7tw4r\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.283295 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19c8777f-603b-48df-b375-555b07680c55-operator-scripts\") pod \"watcher-92df-account-create-update-7tw4r\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.283343 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/230e4fdf-16a8-426a-8b94-906c516a84b5-operator-scripts\") pod \"watcher-db-create-lk472\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.299059 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nlqs\" (UniqueName: \"kubernetes.io/projected/230e4fdf-16a8-426a-8b94-906c516a84b5-kube-api-access-5nlqs\") pod \"watcher-db-create-lk472\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.299703 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pndcb\" (UniqueName: \"kubernetes.io/projected/19c8777f-603b-48df-b375-555b07680c55-kube-api-access-pndcb\") pod \"watcher-92df-account-create-update-7tw4r\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.410636 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.420725 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.694133 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerStarted","Data":"849e0eef4c2268b33707cb39ea946405f704e0a892e7eabce88c262b5394e039"} Mar 20 16:19:20 crc kubenswrapper[4813]: I0320 16:19:20.694387 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerStarted","Data":"ead09ac12a74f960626c5c47f1dea46d0e0c17b3a70bcef846485751ffcea175"} Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.070045 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-lk472"] Mar 20 16:19:21 crc kubenswrapper[4813]: W0320 16:19:21.070271 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod230e4fdf_16a8_426a_8b94_906c516a84b5.slice/crio-2bae54ef6fc65273c48bb81e3a7388acc91996ab6c38b0da3c8633a7b48fa9cb WatchSource:0}: Error finding container 2bae54ef6fc65273c48bb81e3a7388acc91996ab6c38b0da3c8633a7b48fa9cb: Status 404 returned error can't find the container with id 2bae54ef6fc65273c48bb81e3a7388acc91996ab6c38b0da3c8633a7b48fa9cb Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.148370 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-92df-account-create-update-7tw4r"] Mar 20 16:19:21 crc kubenswrapper[4813]: W0320 16:19:21.148660 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c8777f_603b_48df_b375_555b07680c55.slice/crio-613114df67436e4bc87c89d5c1b368843476dddb66693cc55444705db9c4f46a WatchSource:0}: Error finding container 613114df67436e4bc87c89d5c1b368843476dddb66693cc55444705db9c4f46a: Status 404 returned error can't find the container with id 613114df67436e4bc87c89d5c1b368843476dddb66693cc55444705db9c4f46a Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.703577 4813 generic.go:334] "Generic (PLEG): container finished" podID="230e4fdf-16a8-426a-8b94-906c516a84b5" containerID="3f2f7fa5fd73f0e223b1fe70cef7f1e483930a59da4e0f3b5952864956d4bc4b" exitCode=0 Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.703619 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-lk472" event={"ID":"230e4fdf-16a8-426a-8b94-906c516a84b5","Type":"ContainerDied","Data":"3f2f7fa5fd73f0e223b1fe70cef7f1e483930a59da4e0f3b5952864956d4bc4b"} Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.703988 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-lk472" event={"ID":"230e4fdf-16a8-426a-8b94-906c516a84b5","Type":"ContainerStarted","Data":"2bae54ef6fc65273c48bb81e3a7388acc91996ab6c38b0da3c8633a7b48fa9cb"} Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.706519 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerStarted","Data":"978a092aeb40727c459f0f0b9e554d78a5d5b366fbe77d216ffe812bf900e25e"} Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.707572 4813 generic.go:334] "Generic (PLEG): container finished" podID="19c8777f-603b-48df-b375-555b07680c55" 
containerID="342691f8df7c1247bae78fae0011f55b4bb8ea584b388238d2ac73a5066d86a8" exitCode=0 Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.707610 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" event={"ID":"19c8777f-603b-48df-b375-555b07680c55","Type":"ContainerDied","Data":"342691f8df7c1247bae78fae0011f55b4bb8ea584b388238d2ac73a5066d86a8"} Mar 20 16:19:21 crc kubenswrapper[4813]: I0320 16:19:21.707634 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" event={"ID":"19c8777f-603b-48df-b375-555b07680c55","Type":"ContainerStarted","Data":"613114df67436e4bc87c89d5c1b368843476dddb66693cc55444705db9c4f46a"} Mar 20 16:19:22 crc kubenswrapper[4813]: I0320 16:19:22.719858 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerStarted","Data":"9c7b274aa4fe8c10031bfef537bdc83afe0fa1954b4733e8585f060d53b2c3ac"} Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.197799 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.278318 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.347911 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pndcb\" (UniqueName: \"kubernetes.io/projected/19c8777f-603b-48df-b375-555b07680c55-kube-api-access-pndcb\") pod \"19c8777f-603b-48df-b375-555b07680c55\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.348093 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nlqs\" (UniqueName: \"kubernetes.io/projected/230e4fdf-16a8-426a-8b94-906c516a84b5-kube-api-access-5nlqs\") pod \"230e4fdf-16a8-426a-8b94-906c516a84b5\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.348122 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/230e4fdf-16a8-426a-8b94-906c516a84b5-operator-scripts\") pod \"230e4fdf-16a8-426a-8b94-906c516a84b5\" (UID: \"230e4fdf-16a8-426a-8b94-906c516a84b5\") " Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.348154 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19c8777f-603b-48df-b375-555b07680c55-operator-scripts\") pod \"19c8777f-603b-48df-b375-555b07680c55\" (UID: \"19c8777f-603b-48df-b375-555b07680c55\") " Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.349690 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/230e4fdf-16a8-426a-8b94-906c516a84b5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "230e4fdf-16a8-426a-8b94-906c516a84b5" (UID: "230e4fdf-16a8-426a-8b94-906c516a84b5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.349829 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19c8777f-603b-48df-b375-555b07680c55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19c8777f-603b-48df-b375-555b07680c55" (UID: "19c8777f-603b-48df-b375-555b07680c55"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.350064 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/230e4fdf-16a8-426a-8b94-906c516a84b5-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.350090 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19c8777f-603b-48df-b375-555b07680c55-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.351367 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19c8777f-603b-48df-b375-555b07680c55-kube-api-access-pndcb" (OuterVolumeSpecName: "kube-api-access-pndcb") pod "19c8777f-603b-48df-b375-555b07680c55" (UID: "19c8777f-603b-48df-b375-555b07680c55"). InnerVolumeSpecName "kube-api-access-pndcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.356919 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/230e4fdf-16a8-426a-8b94-906c516a84b5-kube-api-access-5nlqs" (OuterVolumeSpecName: "kube-api-access-5nlqs") pod "230e4fdf-16a8-426a-8b94-906c516a84b5" (UID: "230e4fdf-16a8-426a-8b94-906c516a84b5"). InnerVolumeSpecName "kube-api-access-5nlqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.451320 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nlqs\" (UniqueName: \"kubernetes.io/projected/230e4fdf-16a8-426a-8b94-906c516a84b5-kube-api-access-5nlqs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.451353 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pndcb\" (UniqueName: \"kubernetes.io/projected/19c8777f-603b-48df-b375-555b07680c55-kube-api-access-pndcb\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.727318 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" event={"ID":"19c8777f-603b-48df-b375-555b07680c55","Type":"ContainerDied","Data":"613114df67436e4bc87c89d5c1b368843476dddb66693cc55444705db9c4f46a"} Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.727696 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="613114df67436e4bc87c89d5c1b368843476dddb66693cc55444705db9c4f46a" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.727332 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-92df-account-create-update-7tw4r" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.729377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-lk472" event={"ID":"230e4fdf-16a8-426a-8b94-906c516a84b5","Type":"ContainerDied","Data":"2bae54ef6fc65273c48bb81e3a7388acc91996ab6c38b0da3c8633a7b48fa9cb"} Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.729405 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bae54ef6fc65273c48bb81e3a7388acc91996ab6c38b0da3c8633a7b48fa9cb" Mar 20 16:19:23 crc kubenswrapper[4813]: I0320 16:19:23.729407 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-lk472" Mar 20 16:19:24 crc kubenswrapper[4813]: I0320 16:19:24.739186 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerStarted","Data":"b36309e9da41fcec3eaeaa540fba7f5cd5ba7933f03450f1e2a21776b3b39801"} Mar 20 16:19:24 crc kubenswrapper[4813]: I0320 16:19:24.739550 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:24 crc kubenswrapper[4813]: I0320 16:19:24.764795 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.620931434 podStartE2EDuration="6.764771007s" podCreationTimestamp="2026-03-20 16:19:18 +0000 UTC" firstStartedPulling="2026-03-20 16:19:19.786257978 +0000 UTC m=+2489.208960819" lastFinishedPulling="2026-03-20 16:19:23.930097511 +0000 UTC m=+2493.352800392" observedRunningTime="2026-03-20 16:19:24.761438108 +0000 UTC m=+2494.184140949" watchObservedRunningTime="2026-03-20 16:19:24.764771007 +0000 UTC m=+2494.187473848" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.379870 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw"] Mar 20 16:19:25 crc kubenswrapper[4813]: E0320 16:19:25.380186 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c8777f-603b-48df-b375-555b07680c55" containerName="mariadb-account-create-update" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.380203 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c8777f-603b-48df-b375-555b07680c55" containerName="mariadb-account-create-update" Mar 20 16:19:25 crc kubenswrapper[4813]: E0320 16:19:25.380211 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="230e4fdf-16a8-426a-8b94-906c516a84b5" containerName="mariadb-database-create" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.380217 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="230e4fdf-16a8-426a-8b94-906c516a84b5" containerName="mariadb-database-create" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.380364 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c8777f-603b-48df-b375-555b07680c55" containerName="mariadb-account-create-update" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.380378 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="230e4fdf-16a8-426a-8b94-906c516a84b5" containerName="mariadb-database-create" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.380914 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.383939 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-pflsw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.390953 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw"] Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.391131 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.488469 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-db-sync-config-data\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.488528 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmj7m\" (UniqueName: \"kubernetes.io/projected/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-kube-api-access-zmj7m\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.488559 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.488618 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-config-data\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.590327 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-config-data\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.590459 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-db-sync-config-data\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.590533 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmj7m\" (UniqueName: \"kubernetes.io/projected/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-kube-api-access-zmj7m\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc 
kubenswrapper[4813]: I0320 16:19:25.590556 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.595602 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-db-sync-config-data\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.597759 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.599887 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-config-data\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.618512 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmj7m\" (UniqueName: \"kubernetes.io/projected/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-kube-api-access-zmj7m\") pod \"watcher-kuttl-db-sync-pcmhw\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:25 crc kubenswrapper[4813]: I0320 16:19:25.696767 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:26 crc kubenswrapper[4813]: I0320 16:19:26.185836 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw"] Mar 20 16:19:26 crc kubenswrapper[4813]: W0320 16:19:26.195911 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd5cfafd_67cb_47e4_a9b0_739d95b77a6b.slice/crio-81d127d950ea8dded952431041fb563e5f218740e81b4c8ce1cfc7636771a3b6 WatchSource:0}: Error finding container 81d127d950ea8dded952431041fb563e5f218740e81b4c8ce1cfc7636771a3b6: Status 404 returned error can't find the container with id 81d127d950ea8dded952431041fb563e5f218740e81b4c8ce1cfc7636771a3b6 Mar 20 16:19:26 crc kubenswrapper[4813]: I0320 16:19:26.762286 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" event={"ID":"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b","Type":"ContainerStarted","Data":"8db7dae88f5f923d7e7b661f9bccf90056fe91a660d79e798e61379884ea622f"} Mar 20 16:19:26 crc kubenswrapper[4813]: I0320 16:19:26.762933 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" event={"ID":"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b","Type":"ContainerStarted","Data":"81d127d950ea8dded952431041fb563e5f218740e81b4c8ce1cfc7636771a3b6"} Mar 20 16:19:28 crc kubenswrapper[4813]: I0320 16:19:28.779835 4813 generic.go:334] "Generic (PLEG): container finished" podID="bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" containerID="8db7dae88f5f923d7e7b661f9bccf90056fe91a660d79e798e61379884ea622f" exitCode=0 Mar 20 16:19:28 crc kubenswrapper[4813]: I0320 16:19:28.780027 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" event={"ID":"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b","Type":"ContainerDied","Data":"8db7dae88f5f923d7e7b661f9bccf90056fe91a660d79e798e61379884ea622f"} Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.152911 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.266418 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-config-data\") pod \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.266626 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-db-sync-config-data\") pod \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.266686 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-combined-ca-bundle\") pod \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.266729 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmj7m\" (UniqueName: \"kubernetes.io/projected/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-kube-api-access-zmj7m\") pod \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\" (UID: \"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b\") " Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.271974 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-kube-api-access-zmj7m" (OuterVolumeSpecName: "kube-api-access-zmj7m") pod "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" (UID: "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b"). InnerVolumeSpecName "kube-api-access-zmj7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.289016 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" (UID: "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.295867 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" (UID: "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.308474 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-config-data" (OuterVolumeSpecName: "config-data") pod "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" (UID: "bd5cfafd-67cb-47e4-a9b0-739d95b77a6b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.369860 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.369894 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmj7m\" (UniqueName: \"kubernetes.io/projected/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-kube-api-access-zmj7m\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.369909 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.369922 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.801969 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" event={"ID":"bd5cfafd-67cb-47e4-a9b0-739d95b77a6b","Type":"ContainerDied","Data":"81d127d950ea8dded952431041fb563e5f218740e81b4c8ce1cfc7636771a3b6"} Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.802017 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81d127d950ea8dded952431041fb563e5f218740e81b4c8ce1cfc7636771a3b6" Mar 20 16:19:30 crc kubenswrapper[4813]: I0320 16:19:30.802123 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.094971 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: E0320 16:19:31.095253 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" containerName="watcher-kuttl-db-sync" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.095269 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" containerName="watcher-kuttl-db-sync" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.095423 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" containerName="watcher-kuttl-db-sync" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.095940 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.098423 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-pflsw" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.103470 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.108412 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.117471 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.119307 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.121592 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-public-svc" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.121797 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-internal-svc" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.127321 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.138584 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184001 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184049 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ea44c55-d972-4299-9b77-b3569cd2cbf8-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184085 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184116 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb2dk\" (UniqueName: \"kubernetes.io/projected/3ea44c55-d972-4299-9b77-b3569cd2cbf8-kube-api-access-rb2dk\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184141 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184313 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184388 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft5wt\" (UniqueName: \"kubernetes.io/projected/3cedd20e-fadd-4960-be1b-0ae6797437d0-kube-api-access-ft5wt\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184540 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184584 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184625 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.184682 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cedd20e-fadd-4960-be1b-0ae6797437d0-logs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.206466 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.207725 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.213012 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.231126 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286346 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cedd20e-fadd-4960-be1b-0ae6797437d0-logs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286621 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe5fa8d-cd06-4d52-b290-1f57db49a360-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286656 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286679 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ea44c55-d972-4299-9b77-b3569cd2cbf8-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286704 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb2dk\" (UniqueName: \"kubernetes.io/projected/3ea44c55-d972-4299-9b77-b3569cd2cbf8-kube-api-access-rb2dk\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286766 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286790 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " 
pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286812 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw74x\" (UniqueName: \"kubernetes.io/projected/2fe5fa8d-cd06-4d52-b290-1f57db49a360-kube-api-access-nw74x\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286834 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286861 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft5wt\" (UniqueName: \"kubernetes.io/projected/3cedd20e-fadd-4960-be1b-0ae6797437d0-kube-api-access-ft5wt\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286894 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286912 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286935 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286957 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.286977 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.287094 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ea44c55-d972-4299-9b77-b3569cd2cbf8-logs\") 
pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.287592 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cedd20e-fadd-4960-be1b-0ae6797437d0-logs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.291026 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.292098 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.293916 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.296996 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.297746 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.302947 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.303344 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.304319 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft5wt\" (UniqueName: \"kubernetes.io/projected/3cedd20e-fadd-4960-be1b-0ae6797437d0-kube-api-access-ft5wt\") pod \"watcher-kuttl-api-0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc 
kubenswrapper[4813]: I0320 16:19:31.305207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb2dk\" (UniqueName: \"kubernetes.io/projected/3ea44c55-d972-4299-9b77-b3569cd2cbf8-kube-api-access-rb2dk\") pod \"watcher-kuttl-applier-0\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.388633 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe5fa8d-cd06-4d52-b290-1f57db49a360-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.388693 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.388715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw74x\" (UniqueName: \"kubernetes.io/projected/2fe5fa8d-cd06-4d52-b290-1f57db49a360-kube-api-access-nw74x\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.388769 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.388785 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.389451 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe5fa8d-cd06-4d52-b290-1f57db49a360-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.392114 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.392296 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " 
pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.392891 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.408764 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw74x\" (UniqueName: \"kubernetes.io/projected/2fe5fa8d-cd06-4d52-b290-1f57db49a360-kube-api-access-nw74x\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.451930 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.456443 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.524293 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.929368 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: I0320 16:19:31.940344 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:31 crc kubenswrapper[4813]: W0320 16:19:31.949213 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3cedd20e_fadd_4960_be1b_0ae6797437d0.slice/crio-a9212c19a8112cc51f0ab469608db89daa5cc401ee66c2fe98897ab271dbdb0c WatchSource:0}: Error finding container a9212c19a8112cc51f0ab469608db89daa5cc401ee66c2fe98897ab271dbdb0c: Status 404 returned error can't find the container with id a9212c19a8112cc51f0ab469608db89daa5cc401ee66c2fe98897ab271dbdb0c Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.091564 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:32 crc kubenswrapper[4813]: W0320 16:19:32.093598 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe5fa8d_cd06_4d52_b290_1f57db49a360.slice/crio-e1b735b3367011a2e8fbd81a976c730e2f0e68eb10de54a0b7a2fe235f80c913 WatchSource:0}: Error finding container e1b735b3367011a2e8fbd81a976c730e2f0e68eb10de54a0b7a2fe235f80c913: Status 404 returned error can't find the container with id e1b735b3367011a2e8fbd81a976c730e2f0e68eb10de54a0b7a2fe235f80c913 Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.820312 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"2fe5fa8d-cd06-4d52-b290-1f57db49a360","Type":"ContainerStarted","Data":"c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.820620 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" 
event={"ID":"2fe5fa8d-cd06-4d52-b290-1f57db49a360","Type":"ContainerStarted","Data":"e1b735b3367011a2e8fbd81a976c730e2f0e68eb10de54a0b7a2fe235f80c913"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.821899 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"3ea44c55-d972-4299-9b77-b3569cd2cbf8","Type":"ContainerStarted","Data":"21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.821922 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"3ea44c55-d972-4299-9b77-b3569cd2cbf8","Type":"ContainerStarted","Data":"033d321c88d11bc4633ec7a9907de07a11ee0eb249139a0744ca697c86e0980c"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.823756 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"3cedd20e-fadd-4960-be1b-0ae6797437d0","Type":"ContainerStarted","Data":"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.823806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"3cedd20e-fadd-4960-be1b-0ae6797437d0","Type":"ContainerStarted","Data":"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.823821 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"3cedd20e-fadd-4960-be1b-0ae6797437d0","Type":"ContainerStarted","Data":"a9212c19a8112cc51f0ab469608db89daa5cc401ee66c2fe98897ab271dbdb0c"} Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.824961 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.900528 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=1.9005111860000001 podStartE2EDuration="1.900511186s" podCreationTimestamp="2026-03-20 16:19:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:19:32.894794562 +0000 UTC m=+2502.317497403" watchObservedRunningTime="2026-03-20 16:19:32.900511186 +0000 UTC m=+2502.323214017" Mar 20 16:19:32 crc kubenswrapper[4813]: I0320 16:19:32.901420 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=1.9014149310000001 podStartE2EDuration="1.901414931s" podCreationTimestamp="2026-03-20 16:19:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:19:32.867845135 +0000 UTC m=+2502.290547976" watchObservedRunningTime="2026-03-20 16:19:32.901414931 +0000 UTC m=+2502.324117772" Mar 20 16:19:33 crc kubenswrapper[4813]: I0320 16:19:33.842176 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:19:33 crc kubenswrapper[4813]: I0320 16:19:33.842389 4813 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:19:33 crc kubenswrapper[4813]: I0320 16:19:33.842423 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:19:33 crc kubenswrapper[4813]: I0320 16:19:33.842819 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:19:33 crc kubenswrapper[4813]: I0320 16:19:33.842862 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" gracePeriod=600 Mar 20 16:19:34 crc kubenswrapper[4813]: E0320 16:19:34.065669 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:19:34 crc kubenswrapper[4813]: I0320 16:19:34.842172 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" exitCode=0 Mar 20 16:19:34 crc kubenswrapper[4813]: I0320 16:19:34.842261 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2"} Mar 20 16:19:34 crc kubenswrapper[4813]: I0320 16:19:34.842573 4813 scope.go:117] "RemoveContainer" containerID="2c8b835c39ed1f8951bddf885feb47fab28112a96e06e4846419e61cc8cce03c" Mar 20 16:19:34 crc kubenswrapper[4813]: I0320 16:19:34.843150 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 16:19:34 crc kubenswrapper[4813]: I0320 16:19:34.843879 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:19:34 crc kubenswrapper[4813]: E0320 16:19:34.844396 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:19:34 crc kubenswrapper[4813]: I0320 16:19:34.863679 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=3.863655853 podStartE2EDuration="3.863655853s" podCreationTimestamp="2026-03-20 16:19:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:19:32.921808221 +0000 UTC m=+2502.344511062" watchObservedRunningTime="2026-03-20 16:19:34.863655853 +0000 UTC m=+2504.286358694" Mar 20 16:19:35 crc kubenswrapper[4813]: I0320 16:19:35.418049 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:36 crc kubenswrapper[4813]: I0320 16:19:36.452728 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:36 crc kubenswrapper[4813]: I0320 16:19:36.456763 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.452746 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.456789 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.471838 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.500813 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.524895 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.546382 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.904570 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.928926 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.929784 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:19:41 crc kubenswrapper[4813]: I0320 16:19:41.936803 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.198674 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.199986 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="sg-core" containerID="cri-o://9c7b274aa4fe8c10031bfef537bdc83afe0fa1954b4733e8585f060d53b2c3ac" gracePeriod=30 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.200157 4813 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="watcher-kuttl-default/ceilometer-0" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-notification-agent" containerID="cri-o://978a092aeb40727c459f0f0b9e554d78a5d5b366fbe77d216ffe812bf900e25e" gracePeriod=30 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.200163 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="proxy-httpd" containerID="cri-o://b36309e9da41fcec3eaeaa540fba7f5cd5ba7933f03450f1e2a21776b3b39801" gracePeriod=30 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.200454 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-central-agent" containerID="cri-o://849e0eef4c2268b33707cb39ea946405f704e0a892e7eabce88c262b5394e039" gracePeriod=30 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.216462 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.959832 4813 generic.go:334] "Generic (PLEG): container finished" podID="08549fab-16ed-4e10-8553-077405f1eceb" containerID="b36309e9da41fcec3eaeaa540fba7f5cd5ba7933f03450f1e2a21776b3b39801" exitCode=0 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960400 4813 generic.go:334] "Generic (PLEG): container finished" podID="08549fab-16ed-4e10-8553-077405f1eceb" containerID="9c7b274aa4fe8c10031bfef537bdc83afe0fa1954b4733e8585f060d53b2c3ac" exitCode=2 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960412 4813 generic.go:334] "Generic (PLEG): container finished" podID="08549fab-16ed-4e10-8553-077405f1eceb" containerID="978a092aeb40727c459f0f0b9e554d78a5d5b366fbe77d216ffe812bf900e25e" exitCode=0 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960420 4813 generic.go:334] "Generic (PLEG): container finished" podID="08549fab-16ed-4e10-8553-077405f1eceb" containerID="849e0eef4c2268b33707cb39ea946405f704e0a892e7eabce88c262b5394e039" exitCode=0 Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960670 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerDied","Data":"b36309e9da41fcec3eaeaa540fba7f5cd5ba7933f03450f1e2a21776b3b39801"} Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960711 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerDied","Data":"9c7b274aa4fe8c10031bfef537bdc83afe0fa1954b4733e8585f060d53b2c3ac"} Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960723 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerDied","Data":"978a092aeb40727c459f0f0b9e554d78a5d5b366fbe77d216ffe812bf900e25e"} Mar 20 16:19:43 crc kubenswrapper[4813]: I0320 16:19:43.960734 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerDied","Data":"849e0eef4c2268b33707cb39ea946405f704e0a892e7eabce88c262b5394e039"} Mar 20 16:19:44 crc 
kubenswrapper[4813]: I0320 16:19:44.048659 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.105669 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-log-httpd\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.105781 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-scripts\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.105813 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-config-data\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.105848 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-combined-ca-bundle\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.105930 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-sg-core-conf-yaml\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.105963 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96gwd\" (UniqueName: \"kubernetes.io/projected/08549fab-16ed-4e10-8553-077405f1eceb-kube-api-access-96gwd\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.106012 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-run-httpd\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.106045 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-ceilometer-tls-certs\") pod \"08549fab-16ed-4e10-8553-077405f1eceb\" (UID: \"08549fab-16ed-4e10-8553-077405f1eceb\") " Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.107340 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.109691 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.112419 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-scripts" (OuterVolumeSpecName: "scripts") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.133737 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08549fab-16ed-4e10-8553-077405f1eceb-kube-api-access-96gwd" (OuterVolumeSpecName: "kube-api-access-96gwd") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "kube-api-access-96gwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.149673 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.182829 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.198464 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.198748 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-config-data" (OuterVolumeSpecName: "config-data") pod "08549fab-16ed-4e10-8553-077405f1eceb" (UID: "08549fab-16ed-4e10-8553-077405f1eceb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207445 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207472 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207495 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207504 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207514 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207522 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96gwd\" (UniqueName: \"kubernetes.io/projected/08549fab-16ed-4e10-8553-077405f1eceb-kube-api-access-96gwd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207530 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08549fab-16ed-4e10-8553-077405f1eceb-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.207538 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08549fab-16ed-4e10-8553-077405f1eceb-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.970705 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"08549fab-16ed-4e10-8553-077405f1eceb","Type":"ContainerDied","Data":"ead09ac12a74f960626c5c47f1dea46d0e0c17b3a70bcef846485751ffcea175"} Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.970995 4813 scope.go:117] "RemoveContainer" containerID="b36309e9da41fcec3eaeaa540fba7f5cd5ba7933f03450f1e2a21776b3b39801" Mar 20 16:19:44 crc kubenswrapper[4813]: I0320 16:19:44.970794 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.004899 4813 scope.go:117] "RemoveContainer" containerID="9c7b274aa4fe8c10031bfef537bdc83afe0fa1954b4733e8585f060d53b2c3ac" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.014140 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.025363 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.032630 4813 scope.go:117] "RemoveContainer" containerID="978a092aeb40727c459f0f0b9e554d78a5d5b366fbe77d216ffe812bf900e25e" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049279 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:45 crc kubenswrapper[4813]: E0320 16:19:45.049752 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="proxy-httpd" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049778 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="proxy-httpd" Mar 20 16:19:45 crc kubenswrapper[4813]: E0320 16:19:45.049789 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-central-agent" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049796 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-central-agent" Mar 20 16:19:45 crc kubenswrapper[4813]: E0320 16:19:45.049804 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="sg-core" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049811 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="sg-core" Mar 20 16:19:45 crc kubenswrapper[4813]: E0320 16:19:45.049835 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-notification-agent" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049841 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-notification-agent" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049972 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-notification-agent" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049982 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="ceilometer-central-agent" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.049998 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="proxy-httpd" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.050006 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="08549fab-16ed-4e10-8553-077405f1eceb" containerName="sg-core" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.051675 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.054439 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.054774 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.054820 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.060725 4813 scope.go:117] "RemoveContainer" containerID="849e0eef4c2268b33707cb39ea946405f704e0a892e7eabce88c262b5394e039" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.071852 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122354 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122399 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-scripts\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122424 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-run-httpd\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-log-httpd\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122784 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrbqt\" (UniqueName: \"kubernetes.io/projected/74f77e2d-fc3c-481e-b858-fd507c576f32-kube-api-access-zrbqt\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122857 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-config-data\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.122960 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-combined-ca-bundle\") pod \"ceilometer-0\" 
(UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.123077 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.224991 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225063 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-scripts\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225125 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-run-httpd\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225168 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-log-httpd\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225211 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrbqt\" (UniqueName: \"kubernetes.io/projected/74f77e2d-fc3c-481e-b858-fd507c576f32-kube-api-access-zrbqt\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225243 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-config-data\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.225277 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.226157 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-log-httpd\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.226157 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-run-httpd\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.229892 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.230272 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.241830 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-config-data\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.242225 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.242314 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-scripts\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.250152 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrbqt\" (UniqueName: \"kubernetes.io/projected/74f77e2d-fc3c-481e-b858-fd507c576f32-kube-api-access-zrbqt\") pod \"ceilometer-0\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.276769 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08549fab-16ed-4e10-8553-077405f1eceb" path="/var/lib/kubelet/pods/08549fab-16ed-4e10-8553-077405f1eceb/volumes" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.372915 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.839290 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:19:45 crc kubenswrapper[4813]: W0320 16:19:45.844427 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74f77e2d_fc3c_481e_b858_fd507c576f32.slice/crio-cac1ee68f731bee1f0b4b5cf81f5f75f7b6aa7ab3dfa47162cd1865f7aeb80b4 WatchSource:0}: Error finding container cac1ee68f731bee1f0b4b5cf81f5f75f7b6aa7ab3dfa47162cd1865f7aeb80b4: Status 404 returned error can't find the container with id cac1ee68f731bee1f0b4b5cf81f5f75f7b6aa7ab3dfa47162cd1865f7aeb80b4 Mar 20 16:19:45 crc kubenswrapper[4813]: I0320 16:19:45.977925 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerStarted","Data":"cac1ee68f731bee1f0b4b5cf81f5f75f7b6aa7ab3dfa47162cd1865f7aeb80b4"} Mar 20 16:19:46 crc kubenswrapper[4813]: I0320 16:19:46.985772 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerStarted","Data":"af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca"} Mar 20 16:19:47 crc kubenswrapper[4813]: I0320 16:19:47.266247 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:19:47 crc kubenswrapper[4813]: E0320 16:19:47.266847 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:19:47 crc kubenswrapper[4813]: I0320 16:19:47.997573 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerStarted","Data":"8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc"} Mar 20 16:19:49 crc kubenswrapper[4813]: I0320 16:19:49.005632 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerStarted","Data":"49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd"} Mar 20 16:19:51 crc kubenswrapper[4813]: I0320 16:19:51.022608 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerStarted","Data":"f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42"} Mar 20 16:19:51 crc kubenswrapper[4813]: I0320 16:19:51.023057 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:19:51 crc kubenswrapper[4813]: I0320 16:19:51.048442 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.375578837 podStartE2EDuration="6.048420141s" podCreationTimestamp="2026-03-20 16:19:45 +0000 UTC" firstStartedPulling="2026-03-20 16:19:45.846531346 +0000 UTC m=+2515.269234187" 
lastFinishedPulling="2026-03-20 16:19:50.51937264 +0000 UTC m=+2519.942075491" observedRunningTime="2026-03-20 16:19:51.047097666 +0000 UTC m=+2520.469800517" watchObservedRunningTime="2026-03-20 16:19:51.048420141 +0000 UTC m=+2520.471122982" Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.861888 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.862730 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" containerName="watcher-applier" containerID="cri-o://21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" gracePeriod=30 Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.871806 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.872020 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="2fe5fa8d-cd06-4d52-b290-1f57db49a360" containerName="watcher-decision-engine" containerID="cri-o://c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66" gracePeriod=30 Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.890439 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.890682 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/memcached-0" podUID="925d9de3-2778-411a-8e7a-2af03ebc8439" containerName="memcached" containerID="cri-o://a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d" gracePeriod=30 Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.966208 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.966524 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-kuttl-api-log" containerID="cri-o://ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2" gracePeriod=30 Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.966676 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-api" containerID="cri-o://233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32" gracePeriod=30 Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.976948 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-z6ggq"] Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.977990 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.981437 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-memcached-mtls" Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.981442 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"osp-secret" Mar 20 16:19:55 crc kubenswrapper[4813]: I0320 16:19:55.985197 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-z6ggq"] Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.128730 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-credential-keys\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.129113 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-cert-memcached-mtls\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.129172 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-combined-ca-bundle\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.129198 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjb5s\" (UniqueName: \"kubernetes.io/projected/10bd89d7-817c-468e-bf64-fe24b73ea2a2-kube-api-access-vjb5s\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.129266 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-scripts\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.129323 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-config-data\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.129379 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-fernet-keys\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.230990 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-credential-keys\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.231041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-cert-memcached-mtls\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.231094 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-combined-ca-bundle\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.231116 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjb5s\" (UniqueName: \"kubernetes.io/projected/10bd89d7-817c-468e-bf64-fe24b73ea2a2-kube-api-access-vjb5s\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.231181 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-scripts\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.231222 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-config-data\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.231268 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-fernet-keys\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.238581 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-scripts\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.238817 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-credential-keys\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.239398 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-fernet-keys\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.239960 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-cert-memcached-mtls\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.241699 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-combined-ca-bundle\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.251275 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjb5s\" (UniqueName: \"kubernetes.io/projected/10bd89d7-817c-468e-bf64-fe24b73ea2a2-kube-api-access-vjb5s\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.257085 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-config-data\") pod \"keystone-bootstrap-z6ggq\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.293809 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:19:56 crc kubenswrapper[4813]: E0320 16:19:56.459161 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:19:56 crc kubenswrapper[4813]: E0320 16:19:56.462590 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:19:56 crc kubenswrapper[4813]: E0320 16:19:56.469293 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:19:56 crc kubenswrapper[4813]: E0320 16:19:56.469370 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" containerName="watcher-applier" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.508937 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-kuttl-api-log" probeResult="failure" output="Get \"https://10.217.0.190:9322/\": read tcp 10.217.0.2:57792->10.217.0.190:9322: read: connection reset by peer" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.509030 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.0.190:9322/\": read tcp 10.217.0.2:57776->10.217.0.190:9322: read: connection reset by peer" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.762475 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-z6ggq"] Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.871820 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.944400 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-public-tls-certs\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.944826 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cedd20e-fadd-4960-be1b-0ae6797437d0-logs\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.944910 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft5wt\" (UniqueName: \"kubernetes.io/projected/3cedd20e-fadd-4960-be1b-0ae6797437d0-kube-api-access-ft5wt\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.944961 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-internal-tls-certs\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.944995 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-config-data\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.945091 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-combined-ca-bundle\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.945135 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-custom-prometheus-ca\") pod \"3cedd20e-fadd-4960-be1b-0ae6797437d0\" (UID: \"3cedd20e-fadd-4960-be1b-0ae6797437d0\") " Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.946862 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cedd20e-fadd-4960-be1b-0ae6797437d0-logs" (OuterVolumeSpecName: "logs") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.954746 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cedd20e-fadd-4960-be1b-0ae6797437d0-kube-api-access-ft5wt" (OuterVolumeSpecName: "kube-api-access-ft5wt") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "kube-api-access-ft5wt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.979020 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.981029 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.989988 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:56 crc kubenswrapper[4813]: I0320 16:19:56.995719 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-config-data" (OuterVolumeSpecName: "config-data") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.006741 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3cedd20e-fadd-4960-be1b-0ae6797437d0" (UID: "3cedd20e-fadd-4960-be1b-0ae6797437d0"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050788 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050830 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3cedd20e-fadd-4960-be1b-0ae6797437d0-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050843 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft5wt\" (UniqueName: \"kubernetes.io/projected/3cedd20e-fadd-4960-be1b-0ae6797437d0-kube-api-access-ft5wt\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050855 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050865 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050879 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.050892 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/3cedd20e-fadd-4960-be1b-0ae6797437d0-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100399 4813 generic.go:334] "Generic (PLEG): container finished" podID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerID="233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32" exitCode=0 Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100433 4813 generic.go:334] "Generic (PLEG): container finished" podID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerID="ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2" exitCode=143 Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100442 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"3cedd20e-fadd-4960-be1b-0ae6797437d0","Type":"ContainerDied","Data":"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32"} Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100471 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100512 4813 scope.go:117] "RemoveContainer" containerID="233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100495 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"3cedd20e-fadd-4960-be1b-0ae6797437d0","Type":"ContainerDied","Data":"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2"} Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.100617 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"3cedd20e-fadd-4960-be1b-0ae6797437d0","Type":"ContainerDied","Data":"a9212c19a8112cc51f0ab469608db89daa5cc401ee66c2fe98897ab271dbdb0c"} Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.102624 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" event={"ID":"10bd89d7-817c-468e-bf64-fe24b73ea2a2","Type":"ContainerStarted","Data":"b0c39487064ad77ca862706fc1321d278bf4ca772b90c758ef7ff70504911895"} Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.103458 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" event={"ID":"10bd89d7-817c-468e-bf64-fe24b73ea2a2","Type":"ContainerStarted","Data":"8cc1c0e3ffa0bbf04c4a1af5f87adeeffcd211967f32bc5cde8b36b8fac5b74e"} Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.128562 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" podStartSLOduration=2.128537338 podStartE2EDuration="2.128537338s" podCreationTimestamp="2026-03-20 16:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:19:57.118362264 +0000 UTC m=+2526.541065105" watchObservedRunningTime="2026-03-20 16:19:57.128537338 +0000 UTC m=+2526.551240179" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.155501 4813 scope.go:117] "RemoveContainer" containerID="ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.171741 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.184985 4813 scope.go:117] "RemoveContainer" containerID="233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32" Mar 20 16:19:57 crc kubenswrapper[4813]: E0320 16:19:57.188092 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32\": container with ID starting with 233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32 not found: ID does not exist" containerID="233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.188135 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32"} err="failed to get container status \"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32\": rpc error: code = NotFound desc = could not find container 
\"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32\": container with ID starting with 233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32 not found: ID does not exist" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.188157 4813 scope.go:117] "RemoveContainer" containerID="ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2" Mar 20 16:19:57 crc kubenswrapper[4813]: E0320 16:19:57.189584 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2\": container with ID starting with ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2 not found: ID does not exist" containerID="ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.189614 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2"} err="failed to get container status \"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2\": rpc error: code = NotFound desc = could not find container \"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2\": container with ID starting with ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2 not found: ID does not exist" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.189634 4813 scope.go:117] "RemoveContainer" containerID="233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.190712 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32"} err="failed to get container status \"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32\": rpc error: code = NotFound desc = could not find container \"233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32\": container with ID starting with 233d0656f4b8f166e87d91fa0ad1f08591fb05af9cba2446df9a85a843289a32 not found: ID does not exist" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.190743 4813 scope.go:117] "RemoveContainer" containerID="ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.191426 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2"} err="failed to get container status \"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2\": rpc error: code = NotFound desc = could not find container \"ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2\": container with ID starting with ae3350ec174aa8d0e457b59a8bdbe62bd0806d47671f6d54b9440e6663e1b0d2 not found: ID does not exist" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.194209 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.202743 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:57 crc kubenswrapper[4813]: E0320 16:19:57.203137 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-kuttl-api-log" Mar 20 16:19:57 crc 
kubenswrapper[4813]: I0320 16:19:57.203159 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-kuttl-api-log" Mar 20 16:19:57 crc kubenswrapper[4813]: E0320 16:19:57.203184 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-api" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.203193 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-api" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.203438 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-kuttl-api-log" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.203465 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" containerName="watcher-api" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.204668 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.209637 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-public-svc" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.209704 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-watcher-internal-svc" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.209917 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.215854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.274760 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cedd20e-fadd-4960-be1b-0ae6797437d0" path="/var/lib/kubelet/pods/3cedd20e-fadd-4960-be1b-0ae6797437d0/volumes" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.356830 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpwwc\" (UniqueName: \"kubernetes.io/projected/34b347de-9fdb-42be-9c82-88e2f3f132ef-kube-api-access-gpwwc\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.356996 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34b347de-9fdb-42be-9c82-88e2f3f132ef-logs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.357183 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.357253 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.357293 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.357341 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.357390 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.357454 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.458699 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.458770 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.458833 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpwwc\" (UniqueName: \"kubernetes.io/projected/34b347de-9fdb-42be-9c82-88e2f3f132ef-kube-api-access-gpwwc\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.458869 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34b347de-9fdb-42be-9c82-88e2f3f132ef-logs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.458932 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.458977 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.459004 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.459034 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.459636 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34b347de-9fdb-42be-9c82-88e2f3f132ef-logs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.474283 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.474304 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.475439 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.479971 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-public-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.490292 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpwwc\" (UniqueName: \"kubernetes.io/projected/34b347de-9fdb-42be-9c82-88e2f3f132ef-kube-api-access-gpwwc\") pod \"watcher-kuttl-api-0\" (UID: 
\"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.490872 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.514436 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-internal-tls-certs\") pod \"watcher-kuttl-api-0\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.529822 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.860906 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.972076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-combined-ca-bundle\") pod \"925d9de3-2778-411a-8e7a-2af03ebc8439\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.972177 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-memcached-tls-certs\") pod \"925d9de3-2778-411a-8e7a-2af03ebc8439\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.972228 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-config-data\") pod \"925d9de3-2778-411a-8e7a-2af03ebc8439\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.972474 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhfkf\" (UniqueName: \"kubernetes.io/projected/925d9de3-2778-411a-8e7a-2af03ebc8439-kube-api-access-bhfkf\") pod \"925d9de3-2778-411a-8e7a-2af03ebc8439\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.972568 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-kolla-config\") pod \"925d9de3-2778-411a-8e7a-2af03ebc8439\" (UID: \"925d9de3-2778-411a-8e7a-2af03ebc8439\") " Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.973293 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-config-data" (OuterVolumeSpecName: "config-data") pod "925d9de3-2778-411a-8e7a-2af03ebc8439" (UID: "925d9de3-2778-411a-8e7a-2af03ebc8439"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.973336 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "925d9de3-2778-411a-8e7a-2af03ebc8439" (UID: "925d9de3-2778-411a-8e7a-2af03ebc8439"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:19:57 crc kubenswrapper[4813]: I0320 16:19:57.976563 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925d9de3-2778-411a-8e7a-2af03ebc8439-kube-api-access-bhfkf" (OuterVolumeSpecName: "kube-api-access-bhfkf") pod "925d9de3-2778-411a-8e7a-2af03ebc8439" (UID: "925d9de3-2778-411a-8e7a-2af03ebc8439"). InnerVolumeSpecName "kube-api-access-bhfkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.002069 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "925d9de3-2778-411a-8e7a-2af03ebc8439" (UID: "925d9de3-2778-411a-8e7a-2af03ebc8439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.013908 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "925d9de3-2778-411a-8e7a-2af03ebc8439" (UID: "925d9de3-2778-411a-8e7a-2af03ebc8439"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.074139 4813 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.074184 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.074197 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhfkf\" (UniqueName: \"kubernetes.io/projected/925d9de3-2778-411a-8e7a-2af03ebc8439-kube-api-access-bhfkf\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.074212 4813 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/925d9de3-2778-411a-8e7a-2af03ebc8439-kolla-config\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.074222 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/925d9de3-2778-411a-8e7a-2af03ebc8439-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.118803 4813 generic.go:334] "Generic (PLEG): container finished" podID="925d9de3-2778-411a-8e7a-2af03ebc8439" containerID="a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d" exitCode=0 Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.119602 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.121175 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"925d9de3-2778-411a-8e7a-2af03ebc8439","Type":"ContainerDied","Data":"a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d"} Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.121243 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"925d9de3-2778-411a-8e7a-2af03ebc8439","Type":"ContainerDied","Data":"95555513352c667376fcfa440a039d570a50c9975f69b08375b267006414ed7a"} Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.121261 4813 scope.go:117] "RemoveContainer" containerID="a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.128212 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.219766 4813 scope.go:117] "RemoveContainer" containerID="a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d" Mar 20 16:19:58 crc kubenswrapper[4813]: E0320 16:19:58.220532 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d\": container with ID starting with a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d not found: ID does not exist" containerID="a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.220588 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d"} err="failed to get container status \"a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d\": rpc error: code = NotFound desc = could not find container \"a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d\": container with ID starting with a0a77d21596868f5c1c5f87b23822995512382488738cffcfb701279b6e7130d not found: ID does not exist" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.257814 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.269874 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.287673 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 16:19:58 crc kubenswrapper[4813]: E0320 16:19:58.288870 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="925d9de3-2778-411a-8e7a-2af03ebc8439" containerName="memcached" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.288893 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="925d9de3-2778-411a-8e7a-2af03ebc8439" containerName="memcached" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.289073 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="925d9de3-2778-411a-8e7a-2af03ebc8439" containerName="memcached" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.289736 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.298862 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"memcached-config-data" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.299125 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-memcached-svc" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.299658 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"memcached-memcached-dockercfg-g4cpw" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.300991 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.380793 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4df32c5-10f9-4047-9224-c7ecb941a55f-config-data\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.380961 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4df32c5-10f9-4047-9224-c7ecb941a55f-kolla-config\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.381005 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4df32c5-10f9-4047-9224-c7ecb941a55f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.381032 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl2k6\" (UniqueName: \"kubernetes.io/projected/c4df32c5-10f9-4047-9224-c7ecb941a55f-kube-api-access-bl2k6\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.381060 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4df32c5-10f9-4047-9224-c7ecb941a55f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.483509 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4df32c5-10f9-4047-9224-c7ecb941a55f-config-data\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.483609 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4df32c5-10f9-4047-9224-c7ecb941a55f-kolla-config\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.483651 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4df32c5-10f9-4047-9224-c7ecb941a55f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.483676 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl2k6\" (UniqueName: \"kubernetes.io/projected/c4df32c5-10f9-4047-9224-c7ecb941a55f-kube-api-access-bl2k6\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.483711 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4df32c5-10f9-4047-9224-c7ecb941a55f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.485126 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4df32c5-10f9-4047-9224-c7ecb941a55f-kolla-config\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.486025 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4df32c5-10f9-4047-9224-c7ecb941a55f-config-data\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.494853 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4df32c5-10f9-4047-9224-c7ecb941a55f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.495111 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4df32c5-10f9-4047-9224-c7ecb941a55f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.504305 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl2k6\" (UniqueName: \"kubernetes.io/projected/c4df32c5-10f9-4047-9224-c7ecb941a55f-kube-api-access-bl2k6\") pod \"memcached-0\" (UID: \"c4df32c5-10f9-4047-9224-c7ecb941a55f\") " pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.660562 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/memcached-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.873281 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.995997 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb2dk\" (UniqueName: \"kubernetes.io/projected/3ea44c55-d972-4299-9b77-b3569cd2cbf8-kube-api-access-rb2dk\") pod \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.996122 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-combined-ca-bundle\") pod \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.996181 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-config-data\") pod \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.996233 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ea44c55-d972-4299-9b77-b3569cd2cbf8-logs\") pod \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\" (UID: \"3ea44c55-d972-4299-9b77-b3569cd2cbf8\") " Mar 20 16:19:58 crc kubenswrapper[4813]: I0320 16:19:58.996828 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ea44c55-d972-4299-9b77-b3569cd2cbf8-logs" (OuterVolumeSpecName: "logs") pod "3ea44c55-d972-4299-9b77-b3569cd2cbf8" (UID: "3ea44c55-d972-4299-9b77-b3569cd2cbf8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.002192 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ea44c55-d972-4299-9b77-b3569cd2cbf8-kube-api-access-rb2dk" (OuterVolumeSpecName: "kube-api-access-rb2dk") pod "3ea44c55-d972-4299-9b77-b3569cd2cbf8" (UID: "3ea44c55-d972-4299-9b77-b3569cd2cbf8"). InnerVolumeSpecName "kube-api-access-rb2dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.038616 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ea44c55-d972-4299-9b77-b3569cd2cbf8" (UID: "3ea44c55-d972-4299-9b77-b3569cd2cbf8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.040492 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-config-data" (OuterVolumeSpecName: "config-data") pod "3ea44c55-d972-4299-9b77-b3569cd2cbf8" (UID: "3ea44c55-d972-4299-9b77-b3569cd2cbf8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.098381 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb2dk\" (UniqueName: \"kubernetes.io/projected/3ea44c55-d972-4299-9b77-b3569cd2cbf8-kube-api-access-rb2dk\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.099128 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.099236 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea44c55-d972-4299-9b77-b3569cd2cbf8-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.099333 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ea44c55-d972-4299-9b77-b3569cd2cbf8-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.128556 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"34b347de-9fdb-42be-9c82-88e2f3f132ef","Type":"ContainerStarted","Data":"8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3"} Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.128599 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"34b347de-9fdb-42be-9c82-88e2f3f132ef","Type":"ContainerStarted","Data":"8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de"} Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.128611 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"34b347de-9fdb-42be-9c82-88e2f3f132ef","Type":"ContainerStarted","Data":"bbfef06b641f331bbceb4f6470debefacf90dc3ef0f274f73166944a80a6ed63"} Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.129079 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.129983 4813 generic.go:334] "Generic (PLEG): container finished" podID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" exitCode=0 Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.130014 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.130042 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"3ea44c55-d972-4299-9b77-b3569cd2cbf8","Type":"ContainerDied","Data":"21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae"} Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.130066 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"3ea44c55-d972-4299-9b77-b3569cd2cbf8","Type":"ContainerDied","Data":"033d321c88d11bc4633ec7a9907de07a11ee0eb249139a0744ca697c86e0980c"} Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.130089 4813 scope.go:117] "RemoveContainer" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.152908 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.152871476 podStartE2EDuration="2.152871476s" podCreationTimestamp="2026-03-20 16:19:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:19:59.145016174 +0000 UTC m=+2528.567719015" watchObservedRunningTime="2026-03-20 16:19:59.152871476 +0000 UTC m=+2528.575574317" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.171635 4813 scope.go:117] "RemoveContainer" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" Mar 20 16:19:59 crc kubenswrapper[4813]: E0320 16:19:59.172207 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae\": container with ID starting with 21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae not found: ID does not exist" containerID="21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.172252 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae"} err="failed to get container status \"21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae\": rpc error: code = NotFound desc = could not find container \"21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae\": container with ID starting with 21df45d28870fc9dc1b1c33789d228b072f1a20a1a58aee766a25cae7530e8ae not found: ID does not exist" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.181162 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.199435 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.213536 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:59 crc kubenswrapper[4813]: E0320 16:19:59.213912 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" containerName="watcher-applier" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.213935 4813 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" containerName="watcher-applier" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.214089 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" containerName="watcher-applier" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.214676 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.219803 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.227704 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/memcached-0"] Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.245590 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.268024 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:19:59 crc kubenswrapper[4813]: E0320 16:19:59.268233 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.281652 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ea44c55-d972-4299-9b77-b3569cd2cbf8" path="/var/lib/kubelet/pods/3ea44c55-d972-4299-9b77-b3569cd2cbf8/volumes" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.282257 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925d9de3-2778-411a-8e7a-2af03ebc8439" path="/var/lib/kubelet/pods/925d9de3-2778-411a-8e7a-2af03ebc8439/volumes" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.302062 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.302111 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr2tc\" (UniqueName: \"kubernetes.io/projected/00d24df6-eafd-481a-8380-7e4b949304fc-kube-api-access-vr2tc\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.302203 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.302286 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d24df6-eafd-481a-8380-7e4b949304fc-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.302324 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.403274 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.403380 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d24df6-eafd-481a-8380-7e4b949304fc-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.403410 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.403450 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.403469 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr2tc\" (UniqueName: \"kubernetes.io/projected/00d24df6-eafd-481a-8380-7e4b949304fc-kube-api-access-vr2tc\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.404367 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d24df6-eafd-481a-8380-7e4b949304fc-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.407405 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.407635 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.408268 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.425786 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr2tc\" (UniqueName: \"kubernetes.io/projected/00d24df6-eafd-481a-8380-7e4b949304fc-kube-api-access-vr2tc\") pod \"watcher-kuttl-applier-0\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.430132 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:19:59 crc kubenswrapper[4813]: W0320 16:19:59.932449 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d24df6_eafd_481a_8380_7e4b949304fc.slice/crio-44119ffcb27963fdf2c136ff180edbe9d5d858e3f34d9c9532381063340c6252 WatchSource:0}: Error finding container 44119ffcb27963fdf2c136ff180edbe9d5d858e3f34d9c9532381063340c6252: Status 404 returned error can't find the container with id 44119ffcb27963fdf2c136ff180edbe9d5d858e3f34d9c9532381063340c6252 Mar 20 16:19:59 crc kubenswrapper[4813]: I0320 16:19:59.946674 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.144145 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567060-66xlh"] Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.145948 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.150127 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.150387 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.151752 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.152474 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"c4df32c5-10f9-4047-9224-c7ecb941a55f","Type":"ContainerStarted","Data":"a8fef5ff357c1c821aa0d5bfbc68f09c729573dce07271c4f3165554c3dd4348"} Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.152576 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"c4df32c5-10f9-4047-9224-c7ecb941a55f","Type":"ContainerStarted","Data":"788575acada651adf7cc962a56ceb7e664c4294ecaed6e78bb784d2454ca12ff"} Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.152694 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/memcached-0" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.155451 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567060-66xlh"] Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.158552 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"00d24df6-eafd-481a-8380-7e4b949304fc","Type":"ContainerStarted","Data":"44119ffcb27963fdf2c136ff180edbe9d5d858e3f34d9c9532381063340c6252"} Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.158735 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.164732 4813 generic.go:334] "Generic (PLEG): container finished" podID="2fe5fa8d-cd06-4d52-b290-1f57db49a360" containerID="c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66" exitCode=0 Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.164814 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"2fe5fa8d-cd06-4d52-b290-1f57db49a360","Type":"ContainerDied","Data":"c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66"} Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.164861 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"2fe5fa8d-cd06-4d52-b290-1f57db49a360","Type":"ContainerDied","Data":"e1b735b3367011a2e8fbd81a976c730e2f0e68eb10de54a0b7a2fe235f80c913"} Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.164957 4813 scope.go:117] "RemoveContainer" containerID="c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.193363 4813 generic.go:334] "Generic (PLEG): container finished" podID="10bd89d7-817c-468e-bf64-fe24b73ea2a2" containerID="b0c39487064ad77ca862706fc1321d278bf4ca772b90c758ef7ff70504911895" exitCode=0 Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.193460 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" event={"ID":"10bd89d7-817c-468e-bf64-fe24b73ea2a2","Type":"ContainerDied","Data":"b0c39487064ad77ca862706fc1321d278bf4ca772b90c758ef7ff70504911895"} Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.205557 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/memcached-0" podStartSLOduration=2.205538313 podStartE2EDuration="2.205538313s" podCreationTimestamp="2026-03-20 16:19:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:20:00.202716236 +0000 UTC m=+2529.625419067" watchObservedRunningTime="2026-03-20 16:20:00.205538313 +0000 UTC m=+2529.628241154" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.217036 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw74x\" (UniqueName: \"kubernetes.io/projected/2fe5fa8d-cd06-4d52-b290-1f57db49a360-kube-api-access-nw74x\") pod \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.217102 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-config-data\") pod \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.217143 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-combined-ca-bundle\") pod \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.217217 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2fe5fa8d-cd06-4d52-b290-1f57db49a360-logs\") pod \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.217274 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-custom-prometheus-ca\") pod \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\" (UID: \"2fe5fa8d-cd06-4d52-b290-1f57db49a360\") " Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.217522 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89cw9\" (UniqueName: \"kubernetes.io/projected/750e356e-89e8-43b9-9d7d-54cbefcb65bd-kube-api-access-89cw9\") pod \"auto-csr-approver-29567060-66xlh\" (UID: \"750e356e-89e8-43b9-9d7d-54cbefcb65bd\") " pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.223552 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe5fa8d-cd06-4d52-b290-1f57db49a360-kube-api-access-nw74x" (OuterVolumeSpecName: "kube-api-access-nw74x") pod "2fe5fa8d-cd06-4d52-b290-1f57db49a360" (UID: "2fe5fa8d-cd06-4d52-b290-1f57db49a360"). InnerVolumeSpecName "kube-api-access-nw74x". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.223758 4813 scope.go:117] "RemoveContainer" containerID="c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.224193 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fe5fa8d-cd06-4d52-b290-1f57db49a360-logs" (OuterVolumeSpecName: "logs") pod "2fe5fa8d-cd06-4d52-b290-1f57db49a360" (UID: "2fe5fa8d-cd06-4d52-b290-1f57db49a360"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:20:00 crc kubenswrapper[4813]: E0320 16:20:00.226753 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66\": container with ID starting with c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66 not found: ID does not exist" containerID="c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.226787 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66"} err="failed to get container status \"c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66\": rpc error: code = NotFound desc = could not find container \"c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66\": container with ID starting with c193c6d095e094787c61bd72a2738aed74888e19db2737ecf02667386a630f66 not found: ID does not exist" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.247729 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "2fe5fa8d-cd06-4d52-b290-1f57db49a360" (UID: "2fe5fa8d-cd06-4d52-b290-1f57db49a360"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.250850 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2fe5fa8d-cd06-4d52-b290-1f57db49a360" (UID: "2fe5fa8d-cd06-4d52-b290-1f57db49a360"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.285475 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-config-data" (OuterVolumeSpecName: "config-data") pod "2fe5fa8d-cd06-4d52-b290-1f57db49a360" (UID: "2fe5fa8d-cd06-4d52-b290-1f57db49a360"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.319050 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89cw9\" (UniqueName: \"kubernetes.io/projected/750e356e-89e8-43b9-9d7d-54cbefcb65bd-kube-api-access-89cw9\") pod \"auto-csr-approver-29567060-66xlh\" (UID: \"750e356e-89e8-43b9-9d7d-54cbefcb65bd\") " pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.319167 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.319178 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.319188 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2fe5fa8d-cd06-4d52-b290-1f57db49a360-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.319197 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2fe5fa8d-cd06-4d52-b290-1f57db49a360-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.319205 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw74x\" (UniqueName: \"kubernetes.io/projected/2fe5fa8d-cd06-4d52-b290-1f57db49a360-kube-api-access-nw74x\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.338919 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89cw9\" (UniqueName: \"kubernetes.io/projected/750e356e-89e8-43b9-9d7d-54cbefcb65bd-kube-api-access-89cw9\") pod \"auto-csr-approver-29567060-66xlh\" (UID: \"750e356e-89e8-43b9-9d7d-54cbefcb65bd\") " pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.484651 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:00 crc kubenswrapper[4813]: I0320 16:20:00.995109 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567060-66xlh"] Mar 20 16:20:01 crc kubenswrapper[4813]: W0320 16:20:01.000505 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750e356e_89e8_43b9_9d7d_54cbefcb65bd.slice/crio-5a4cd87f82b32a2c56f8de5df2b2d8ae549d3436cc4bcac5dbd95851c3fcbeec WatchSource:0}: Error finding container 5a4cd87f82b32a2c56f8de5df2b2d8ae549d3436cc4bcac5dbd95851c3fcbeec: Status 404 returned error can't find the container with id 5a4cd87f82b32a2c56f8de5df2b2d8ae549d3436cc4bcac5dbd95851c3fcbeec Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.203247 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567060-66xlh" event={"ID":"750e356e-89e8-43b9-9d7d-54cbefcb65bd","Type":"ContainerStarted","Data":"5a4cd87f82b32a2c56f8de5df2b2d8ae549d3436cc4bcac5dbd95851c3fcbeec"} Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.206387 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"00d24df6-eafd-481a-8380-7e4b949304fc","Type":"ContainerStarted","Data":"b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60"} Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.208840 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.241240 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=2.241216761 podStartE2EDuration="2.241216761s" podCreationTimestamp="2026-03-20 16:19:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:20:01.222742273 +0000 UTC m=+2530.645445124" watchObservedRunningTime="2026-03-20 16:20:01.241216761 +0000 UTC m=+2530.663919602" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.279923 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.288868 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.301018 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:20:01 crc kubenswrapper[4813]: E0320 16:20:01.301313 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe5fa8d-cd06-4d52-b290-1f57db49a360" containerName="watcher-decision-engine" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.301328 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe5fa8d-cd06-4d52-b290-1f57db49a360" containerName="watcher-decision-engine" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.301478 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe5fa8d-cd06-4d52-b290-1f57db49a360" containerName="watcher-decision-engine" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.302005 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.304229 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.312415 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.440598 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.440672 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c092d35a-2592-45c5-a196-17f993b66c7e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.440712 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.440755 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.440833 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md2bf\" (UniqueName: \"kubernetes.io/projected/c092d35a-2592-45c5-a196-17f993b66c7e-kube-api-access-md2bf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.440877 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.542670 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.542791 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-md2bf\" (UniqueName: \"kubernetes.io/projected/c092d35a-2592-45c5-a196-17f993b66c7e-kube-api-access-md2bf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.542852 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.542918 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.542963 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c092d35a-2592-45c5-a196-17f993b66c7e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.542998 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.548870 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.549573 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c092d35a-2592-45c5-a196-17f993b66c7e-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.549743 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.551708 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc 
kubenswrapper[4813]: I0320 16:20:01.557606 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.570155 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md2bf\" (UniqueName: \"kubernetes.io/projected/c092d35a-2592-45c5-a196-17f993b66c7e-kube-api-access-md2bf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.630755 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.650299 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.664772 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746505 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-credential-keys\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746578 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-scripts\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746641 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-cert-memcached-mtls\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746692 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-combined-ca-bundle\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746756 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-fernet-keys\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746787 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-config-data\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.746807 4813 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjb5s\" (UniqueName: \"kubernetes.io/projected/10bd89d7-817c-468e-bf64-fe24b73ea2a2-kube-api-access-vjb5s\") pod \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\" (UID: \"10bd89d7-817c-468e-bf64-fe24b73ea2a2\") " Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.751133 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10bd89d7-817c-468e-bf64-fe24b73ea2a2-kube-api-access-vjb5s" (OuterVolumeSpecName: "kube-api-access-vjb5s") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "kube-api-access-vjb5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.752666 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-scripts" (OuterVolumeSpecName: "scripts") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.753204 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.753359 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.777683 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-config-data" (OuterVolumeSpecName: "config-data") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.778703 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.834243 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "10bd89d7-817c-468e-bf64-fe24b73ea2a2" (UID: "10bd89d7-817c-468e-bf64-fe24b73ea2a2"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851248 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851282 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851294 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851302 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851313 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851323 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjb5s\" (UniqueName: \"kubernetes.io/projected/10bd89d7-817c-468e-bf64-fe24b73ea2a2-kube-api-access-vjb5s\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:01 crc kubenswrapper[4813]: I0320 16:20:01.851330 4813 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/10bd89d7-817c-468e-bf64-fe24b73ea2a2-credential-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:02 crc kubenswrapper[4813]: I0320 16:20:02.176992 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:20:02 crc kubenswrapper[4813]: W0320 16:20:02.181321 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc092d35a_2592_45c5_a196_17f993b66c7e.slice/crio-4477d49990e8b7abb681b2b04afcdbbfb53f67cafd0c4b8ab7e57a08036513d5 WatchSource:0}: Error finding container 4477d49990e8b7abb681b2b04afcdbbfb53f67cafd0c4b8ab7e57a08036513d5: Status 404 returned error can't find the container with id 4477d49990e8b7abb681b2b04afcdbbfb53f67cafd0c4b8ab7e57a08036513d5 Mar 20 16:20:02 crc kubenswrapper[4813]: I0320 16:20:02.220080 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" event={"ID":"10bd89d7-817c-468e-bf64-fe24b73ea2a2","Type":"ContainerDied","Data":"8cc1c0e3ffa0bbf04c4a1af5f87adeeffcd211967f32bc5cde8b36b8fac5b74e"} Mar 20 16:20:02 crc kubenswrapper[4813]: I0320 16:20:02.223197 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cc1c0e3ffa0bbf04c4a1af5f87adeeffcd211967f32bc5cde8b36b8fac5b74e" Mar 20 16:20:02 crc kubenswrapper[4813]: I0320 16:20:02.223315 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"c092d35a-2592-45c5-a196-17f993b66c7e","Type":"ContainerStarted","Data":"4477d49990e8b7abb681b2b04afcdbbfb53f67cafd0c4b8ab7e57a08036513d5"} Mar 20 16:20:02 crc kubenswrapper[4813]: I0320 
16:20:02.220361 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-z6ggq" Mar 20 16:20:02 crc kubenswrapper[4813]: I0320 16:20:02.537713 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:03 crc kubenswrapper[4813]: I0320 16:20:03.231370 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"c092d35a-2592-45c5-a196-17f993b66c7e","Type":"ContainerStarted","Data":"4332d088397ffa5bef8f4c0fd01832136c2d7e36f2cc2070dde5f1c4b95e0d74"} Mar 20 16:20:03 crc kubenswrapper[4813]: I0320 16:20:03.259736 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.259712141 podStartE2EDuration="2.259712141s" podCreationTimestamp="2026-03-20 16:20:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:20:03.24926759 +0000 UTC m=+2532.671970461" watchObservedRunningTime="2026-03-20 16:20:03.259712141 +0000 UTC m=+2532.682415002" Mar 20 16:20:03 crc kubenswrapper[4813]: I0320 16:20:03.284580 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fe5fa8d-cd06-4d52-b290-1f57db49a360" path="/var/lib/kubelet/pods/2fe5fa8d-cd06-4d52-b290-1f57db49a360/volumes" Mar 20 16:20:04 crc kubenswrapper[4813]: I0320 16:20:04.252925 4813 generic.go:334] "Generic (PLEG): container finished" podID="750e356e-89e8-43b9-9d7d-54cbefcb65bd" containerID="219012ed31007297769945ff0ddb04225f14ab5da152e490631ec4ffa2cd82e0" exitCode=0 Mar 20 16:20:04 crc kubenswrapper[4813]: I0320 16:20:04.253028 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567060-66xlh" event={"ID":"750e356e-89e8-43b9-9d7d-54cbefcb65bd","Type":"ContainerDied","Data":"219012ed31007297769945ff0ddb04225f14ab5da152e490631ec4ffa2cd82e0"} Mar 20 16:20:04 crc kubenswrapper[4813]: I0320 16:20:04.431756 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:20:05 crc kubenswrapper[4813]: I0320 16:20:05.640553 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:05 crc kubenswrapper[4813]: I0320 16:20:05.714140 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89cw9\" (UniqueName: \"kubernetes.io/projected/750e356e-89e8-43b9-9d7d-54cbefcb65bd-kube-api-access-89cw9\") pod \"750e356e-89e8-43b9-9d7d-54cbefcb65bd\" (UID: \"750e356e-89e8-43b9-9d7d-54cbefcb65bd\") " Mar 20 16:20:05 crc kubenswrapper[4813]: I0320 16:20:05.719914 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/750e356e-89e8-43b9-9d7d-54cbefcb65bd-kube-api-access-89cw9" (OuterVolumeSpecName: "kube-api-access-89cw9") pod "750e356e-89e8-43b9-9d7d-54cbefcb65bd" (UID: "750e356e-89e8-43b9-9d7d-54cbefcb65bd"). InnerVolumeSpecName "kube-api-access-89cw9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:05 crc kubenswrapper[4813]: I0320 16:20:05.816392 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89cw9\" (UniqueName: \"kubernetes.io/projected/750e356e-89e8-43b9-9d7d-54cbefcb65bd-kube-api-access-89cw9\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:06 crc kubenswrapper[4813]: I0320 16:20:06.284907 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567060-66xlh" event={"ID":"750e356e-89e8-43b9-9d7d-54cbefcb65bd","Type":"ContainerDied","Data":"5a4cd87f82b32a2c56f8de5df2b2d8ae549d3436cc4bcac5dbd95851c3fcbeec"} Mar 20 16:20:06 crc kubenswrapper[4813]: I0320 16:20:06.284951 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a4cd87f82b32a2c56f8de5df2b2d8ae549d3436cc4bcac5dbd95851c3fcbeec" Mar 20 16:20:06 crc kubenswrapper[4813]: I0320 16:20:06.285013 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567060-66xlh" Mar 20 16:20:06 crc kubenswrapper[4813]: I0320 16:20:06.722219 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567054-fglrr"] Mar 20 16:20:06 crc kubenswrapper[4813]: I0320 16:20:06.728451 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567054-fglrr"] Mar 20 16:20:07 crc kubenswrapper[4813]: I0320 16:20:07.276769 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbff87fc-a059-46bb-a25d-21485ab668c8" path="/var/lib/kubelet/pods/cbff87fc-a059-46bb-a25d-21485ab668c8/volumes" Mar 20 16:20:07 crc kubenswrapper[4813]: I0320 16:20:07.530883 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:07 crc kubenswrapper[4813]: I0320 16:20:07.539053 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.308837 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.662850 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/memcached-0" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.801594 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-5d69f9588b-zsrh7"] Mar 20 16:20:08 crc kubenswrapper[4813]: E0320 16:20:08.802152 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="750e356e-89e8-43b9-9d7d-54cbefcb65bd" containerName="oc" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.802169 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="750e356e-89e8-43b9-9d7d-54cbefcb65bd" containerName="oc" Mar 20 16:20:08 crc kubenswrapper[4813]: E0320 16:20:08.802183 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10bd89d7-817c-468e-bf64-fe24b73ea2a2" containerName="keystone-bootstrap" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.802190 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="10bd89d7-817c-468e-bf64-fe24b73ea2a2" containerName="keystone-bootstrap" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.802350 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="750e356e-89e8-43b9-9d7d-54cbefcb65bd" containerName="oc" Mar 20 
16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.802378 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="10bd89d7-817c-468e-bf64-fe24b73ea2a2" containerName="keystone-bootstrap" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.802908 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.820299 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-5d69f9588b-zsrh7"] Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877199 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-combined-ca-bundle\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877509 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-fernet-keys\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877533 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-public-tls-certs\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877692 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-internal-tls-certs\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877737 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvvtb\" (UniqueName: \"kubernetes.io/projected/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-kube-api-access-nvvtb\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877894 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-scripts\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.877978 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-cert-memcached-mtls\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.878039 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-config-data\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.878083 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-credential-keys\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.979615 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvvtb\" (UniqueName: \"kubernetes.io/projected/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-kube-api-access-nvvtb\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.979956 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-scripts\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.980054 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-cert-memcached-mtls\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.980169 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-config-data\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.980450 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-credential-keys\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.981256 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-combined-ca-bundle\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.981373 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-fernet-keys\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 
16:20:08.981452 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-public-tls-certs\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.981574 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-internal-tls-certs\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.990145 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-public-tls-certs\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.991162 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-credential-keys\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.991352 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-internal-tls-certs\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.991666 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-combined-ca-bundle\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.991809 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-fernet-keys\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.992077 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-cert-memcached-mtls\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.996795 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-scripts\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.997736 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nvvtb\" (UniqueName: \"kubernetes.io/projected/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-kube-api-access-nvvtb\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:08 crc kubenswrapper[4813]: I0320 16:20:08.998881 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488cca2e-7391-4ebb-9f9d-e6d5f04e94c4-config-data\") pod \"keystone-5d69f9588b-zsrh7\" (UID: \"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4\") " pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:09 crc kubenswrapper[4813]: I0320 16:20:09.127274 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:09 crc kubenswrapper[4813]: I0320 16:20:09.430836 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:20:09 crc kubenswrapper[4813]: I0320 16:20:09.456723 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:20:09 crc kubenswrapper[4813]: I0320 16:20:09.596620 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-5d69f9588b-zsrh7"] Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.318739 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" event={"ID":"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4","Type":"ContainerStarted","Data":"fb17e614eb7e921e0b18b20805e7a78b97decf56f3347f87489a5719c0619f46"} Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.319091 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" event={"ID":"488cca2e-7391-4ebb-9f9d-e6d5f04e94c4","Type":"ContainerStarted","Data":"63235ea98d8824e9908e8cb8a86b50bbc567dac6bc4c9b20656020b2878c5ba3"} Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.319376 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.346641 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.351870 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" podStartSLOduration=2.351847447 podStartE2EDuration="2.351847447s" podCreationTimestamp="2026-03-20 16:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:20:10.342593868 +0000 UTC m=+2539.765296719" watchObservedRunningTime="2026-03-20 16:20:10.351847447 +0000 UTC m=+2539.774550288" Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.430703 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.431902 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-api" containerID="cri-o://8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3" 
gracePeriod=30 Mar 20 16:20:10 crc kubenswrapper[4813]: I0320 16:20:10.433751 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-kuttl-api-log" containerID="cri-o://8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de" gracePeriod=30 Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.328001 4813 generic.go:334] "Generic (PLEG): container finished" podID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerID="8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de" exitCode=143 Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.328083 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"34b347de-9fdb-42be-9c82-88e2f3f132ef","Type":"ContainerDied","Data":"8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de"} Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.631845 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.660054 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.716614 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.827671 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-custom-prometheus-ca\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.827981 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-internal-tls-certs\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.828067 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34b347de-9fdb-42be-9c82-88e2f3f132ef-logs\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.828550 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34b347de-9fdb-42be-9c82-88e2f3f132ef-logs" (OuterVolumeSpecName: "logs") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.828632 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-public-tls-certs\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.828991 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpwwc\" (UniqueName: \"kubernetes.io/projected/34b347de-9fdb-42be-9c82-88e2f3f132ef-kube-api-access-gpwwc\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.829062 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-config-data\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.829102 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-combined-ca-bundle\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.829152 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-cert-memcached-mtls\") pod \"34b347de-9fdb-42be-9c82-88e2f3f132ef\" (UID: \"34b347de-9fdb-42be-9c82-88e2f3f132ef\") " Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.829576 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34b347de-9fdb-42be-9c82-88e2f3f132ef-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.833763 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b347de-9fdb-42be-9c82-88e2f3f132ef-kube-api-access-gpwwc" (OuterVolumeSpecName: "kube-api-access-gpwwc") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "kube-api-access-gpwwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.869217 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.870467 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.883987 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.885626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-config-data" (OuterVolumeSpecName: "config-data") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.886103 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.912684 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "34b347de-9fdb-42be-9c82-88e2f3f132ef" (UID: "34b347de-9fdb-42be-9c82-88e2f3f132ef"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931517 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpwwc\" (UniqueName: \"kubernetes.io/projected/34b347de-9fdb-42be-9c82-88e2f3f132ef-kube-api-access-gpwwc\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931560 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931572 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931585 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931595 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931606 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:11 crc kubenswrapper[4813]: I0320 16:20:11.931618 4813 reconciler_common.go:293] 
"Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34b347de-9fdb-42be-9c82-88e2f3f132ef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.337891 4813 generic.go:334] "Generic (PLEG): container finished" podID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerID="8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3" exitCode=0 Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.338916 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.342629 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"34b347de-9fdb-42be-9c82-88e2f3f132ef","Type":"ContainerDied","Data":"8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3"} Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.342673 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.342686 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"34b347de-9fdb-42be-9c82-88e2f3f132ef","Type":"ContainerDied","Data":"bbfef06b641f331bbceb4f6470debefacf90dc3ef0f274f73166944a80a6ed63"} Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.343007 4813 scope.go:117] "RemoveContainer" containerID="8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.375431 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.426085 4813 scope.go:117] "RemoveContainer" containerID="8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.432009 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.444795 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.445327 4813 scope.go:117] "RemoveContainer" containerID="8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3" Mar 20 16:20:12 crc kubenswrapper[4813]: E0320 16:20:12.449634 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3\": container with ID starting with 8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3 not found: ID does not exist" containerID="8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.449896 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3"} err="failed to get container status \"8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3\": rpc error: code = NotFound desc = could not find container \"8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3\": container with ID starting with 8066b647263878b79d14cfe543b054fa6aff46f8704e3c564ffb922ba348eaa3 not found: 
ID does not exist" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.449930 4813 scope.go:117] "RemoveContainer" containerID="8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de" Mar 20 16:20:12 crc kubenswrapper[4813]: E0320 16:20:12.450635 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de\": container with ID starting with 8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de not found: ID does not exist" containerID="8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.450659 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de"} err="failed to get container status \"8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de\": rpc error: code = NotFound desc = could not find container \"8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de\": container with ID starting with 8b2d46b33a7942d65ab951c6d68e61c45926b96e885de7924c5f3e167b61d3de not found: ID does not exist" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.455957 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:20:12 crc kubenswrapper[4813]: E0320 16:20:12.456444 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-api" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.456466 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-api" Mar 20 16:20:12 crc kubenswrapper[4813]: E0320 16:20:12.456508 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-kuttl-api-log" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.456521 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-kuttl-api-log" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.456801 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-api" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.456827 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" containerName="watcher-kuttl-api-log" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.458460 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.462085 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.469418 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.541677 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6kzr\" (UniqueName: \"kubernetes.io/projected/660b0cb7-523e-488a-b267-43331525e435-kube-api-access-j6kzr\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.541729 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.541757 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.541890 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.541938 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.541975 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660b0cb7-523e-488a-b267-43331525e435-logs\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.643191 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.643261 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660b0cb7-523e-488a-b267-43331525e435-logs\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " 
pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.643296 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6kzr\" (UniqueName: \"kubernetes.io/projected/660b0cb7-523e-488a-b267-43331525e435-kube-api-access-j6kzr\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.643325 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.643351 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.643501 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.644656 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660b0cb7-523e-488a-b267-43331525e435-logs\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.647396 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.647497 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.647574 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.650792 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.667017 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6kzr\" (UniqueName: \"kubernetes.io/projected/660b0cb7-523e-488a-b267-43331525e435-kube-api-access-j6kzr\") pod \"watcher-kuttl-api-0\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:12 crc kubenswrapper[4813]: I0320 16:20:12.783879 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:13 crc kubenswrapper[4813]: I0320 16:20:13.269393 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:20:13 crc kubenswrapper[4813]: E0320 16:20:13.269868 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:20:13 crc kubenswrapper[4813]: I0320 16:20:13.294994 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34b347de-9fdb-42be-9c82-88e2f3f132ef" path="/var/lib/kubelet/pods/34b347de-9fdb-42be-9c82-88e2f3f132ef/volumes" Mar 20 16:20:13 crc kubenswrapper[4813]: I0320 16:20:13.331163 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:20:14 crc kubenswrapper[4813]: I0320 16:20:14.399549 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"660b0cb7-523e-488a-b267-43331525e435","Type":"ContainerStarted","Data":"2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07"} Mar 20 16:20:14 crc kubenswrapper[4813]: I0320 16:20:14.400005 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:14 crc kubenswrapper[4813]: I0320 16:20:14.400019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"660b0cb7-523e-488a-b267-43331525e435","Type":"ContainerStarted","Data":"26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba"} Mar 20 16:20:14 crc kubenswrapper[4813]: I0320 16:20:14.400028 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"660b0cb7-523e-488a-b267-43331525e435","Type":"ContainerStarted","Data":"16563f7aeadd4a3de67c54eda39c7a36971a18987947d4c5fab89274f885c887"} Mar 20 16:20:15 crc kubenswrapper[4813]: I0320 16:20:15.386661 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:15 crc kubenswrapper[4813]: I0320 16:20:15.418335 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=3.41830883 podStartE2EDuration="3.41830883s" podCreationTimestamp="2026-03-20 16:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:20:14.421935391 +0000 UTC m=+2543.844638232" watchObservedRunningTime="2026-03-20 16:20:15.41830883 +0000 UTC m=+2544.841011711" Mar 20 16:20:16 crc kubenswrapper[4813]: I0320 16:20:16.809283 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:17 crc kubenswrapper[4813]: I0320 16:20:17.784673 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:18 crc kubenswrapper[4813]: I0320 16:20:18.020272 4813 scope.go:117] "RemoveContainer" containerID="6f742d180ea65b4d95c4ca8d4becad4e1c9241734664bdcdc3554c8eebb6dd35" Mar 20 16:20:22 crc kubenswrapper[4813]: I0320 16:20:22.785026 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:22 crc kubenswrapper[4813]: I0320 16:20:22.797297 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:23 crc kubenswrapper[4813]: I0320 16:20:23.514037 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:20:25 crc kubenswrapper[4813]: I0320 16:20:25.269904 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:20:25 crc kubenswrapper[4813]: E0320 16:20:25.270404 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:20:38 crc kubenswrapper[4813]: I0320 16:20:38.266042 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:20:38 crc kubenswrapper[4813]: E0320 16:20:38.267315 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:20:40 crc kubenswrapper[4813]: I0320 16:20:40.622005 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/keystone-5d69f9588b-zsrh7" Mar 20 16:20:40 crc kubenswrapper[4813]: I0320 16:20:40.707421 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-8469ccf466-qnct6"] Mar 20 16:20:40 crc kubenswrapper[4813]: I0320 16:20:40.707710 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" podUID="db6d5669-5b48-48ab-8f34-5cd0ecb91d93" containerName="keystone-api" containerID="cri-o://500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552" gracePeriod=30 Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.593205 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2ztqb"] Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.595249 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.605552 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2ztqb"] Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.669550 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gx5z\" (UniqueName: \"kubernetes.io/projected/ae06b4b9-c759-4118-b9c1-a03dce5c759b-kube-api-access-4gx5z\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.669620 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-utilities\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.669826 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-catalog-content\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.771566 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gx5z\" (UniqueName: \"kubernetes.io/projected/ae06b4b9-c759-4118-b9c1-a03dce5c759b-kube-api-access-4gx5z\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.771664 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-utilities\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.771727 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-catalog-content\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.772215 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-catalog-content\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.774045 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-utilities\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.799756 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4gx5z\" (UniqueName: \"kubernetes.io/projected/ae06b4b9-c759-4118-b9c1-a03dce5c759b-kube-api-access-4gx5z\") pod \"certified-operators-2ztqb\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:41 crc kubenswrapper[4813]: I0320 16:20:41.917339 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:42 crc kubenswrapper[4813]: I0320 16:20:42.406175 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2ztqb"] Mar 20 16:20:42 crc kubenswrapper[4813]: I0320 16:20:42.664213 4813 generic.go:334] "Generic (PLEG): container finished" podID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerID="627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93" exitCode=0 Mar 20 16:20:42 crc kubenswrapper[4813]: I0320 16:20:42.664375 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerDied","Data":"627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93"} Mar 20 16:20:42 crc kubenswrapper[4813]: I0320 16:20:42.664677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerStarted","Data":"f2386d6adfecde83a6ad3f19c3f0b52ce181f2577a28dcd55c7a09abac7946bf"} Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.243232 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336353 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-combined-ca-bundle\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336410 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-credential-keys\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336467 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnhmf\" (UniqueName: \"kubernetes.io/projected/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-kube-api-access-nnhmf\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336541 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-internal-tls-certs\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336571 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-fernet-keys\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " 
Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336617 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-public-tls-certs\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336647 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-config-data\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.336701 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-scripts\") pod \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\" (UID: \"db6d5669-5b48-48ab-8f34-5cd0ecb91d93\") " Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.343441 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-kube-api-access-nnhmf" (OuterVolumeSpecName: "kube-api-access-nnhmf") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "kube-api-access-nnhmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.344096 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-scripts" (OuterVolumeSpecName: "scripts") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.351835 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.358577 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.374135 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-config-data" (OuterVolumeSpecName: "config-data") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.375732 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.380909 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.388959 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "db6d5669-5b48-48ab-8f34-5cd0ecb91d93" (UID: "db6d5669-5b48-48ab-8f34-5cd0ecb91d93"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.438897 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.438939 4813 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-credential-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.438952 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnhmf\" (UniqueName: \"kubernetes.io/projected/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-kube-api-access-nnhmf\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.438967 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.438979 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.438991 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.439001 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.439010 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db6d5669-5b48-48ab-8f34-5cd0ecb91d93-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:44 crc 
kubenswrapper[4813]: I0320 16:20:44.685240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerStarted","Data":"2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0"} Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.687750 4813 generic.go:334] "Generic (PLEG): container finished" podID="db6d5669-5b48-48ab-8f34-5cd0ecb91d93" containerID="500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552" exitCode=0 Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.687791 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" event={"ID":"db6d5669-5b48-48ab-8f34-5cd0ecb91d93","Type":"ContainerDied","Data":"500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552"} Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.687798 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.687820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-8469ccf466-qnct6" event={"ID":"db6d5669-5b48-48ab-8f34-5cd0ecb91d93","Type":"ContainerDied","Data":"d2cf3e7c7c6e7f0a17ad573b7b1979a2fb2f5d8a848d6c300172842c24c7198c"} Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.687841 4813 scope.go:117] "RemoveContainer" containerID="500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.736094 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-8469ccf466-qnct6"] Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.740897 4813 scope.go:117] "RemoveContainer" containerID="500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552" Mar 20 16:20:44 crc kubenswrapper[4813]: E0320 16:20:44.741396 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552\": container with ID starting with 500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552 not found: ID does not exist" containerID="500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.741449 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552"} err="failed to get container status \"500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552\": rpc error: code = NotFound desc = could not find container \"500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552\": container with ID starting with 500076c15a3364defa44daed55320b344c998267799b04cab5654a0219176552 not found: ID does not exist" Mar 20 16:20:44 crc kubenswrapper[4813]: I0320 16:20:44.743991 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-8469ccf466-qnct6"] Mar 20 16:20:45 crc kubenswrapper[4813]: I0320 16:20:45.278395 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db6d5669-5b48-48ab-8f34-5cd0ecb91d93" path="/var/lib/kubelet/pods/db6d5669-5b48-48ab-8f34-5cd0ecb91d93/volumes" Mar 20 16:20:45 crc kubenswrapper[4813]: I0320 16:20:45.699956 4813 generic.go:334] "Generic (PLEG): container finished" 
podID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerID="2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0" exitCode=0 Mar 20 16:20:45 crc kubenswrapper[4813]: I0320 16:20:45.700001 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerDied","Data":"2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0"} Mar 20 16:20:46 crc kubenswrapper[4813]: I0320 16:20:46.709986 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerStarted","Data":"a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795"} Mar 20 16:20:46 crc kubenswrapper[4813]: I0320 16:20:46.732550 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2ztqb" podStartSLOduration=2.013967256 podStartE2EDuration="5.732532038s" podCreationTimestamp="2026-03-20 16:20:41 +0000 UTC" firstStartedPulling="2026-03-20 16:20:42.666302768 +0000 UTC m=+2572.089005609" lastFinishedPulling="2026-03-20 16:20:46.38486755 +0000 UTC m=+2575.807570391" observedRunningTime="2026-03-20 16:20:46.726099065 +0000 UTC m=+2576.148801906" watchObservedRunningTime="2026-03-20 16:20:46.732532038 +0000 UTC m=+2576.155234879" Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.027719 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.028212 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-central-agent" containerID="cri-o://af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca" gracePeriod=30 Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.028265 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="proxy-httpd" containerID="cri-o://f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42" gracePeriod=30 Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.028330 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="sg-core" containerID="cri-o://49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd" gracePeriod=30 Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.028463 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-notification-agent" containerID="cri-o://8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc" gracePeriod=30 Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.730177 4813 generic.go:334] "Generic (PLEG): container finished" podID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerID="f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42" exitCode=0 Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.730219 4813 generic.go:334] "Generic (PLEG): container finished" podID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerID="49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd" exitCode=2 Mar 20 16:20:48 crc 
kubenswrapper[4813]: I0320 16:20:48.730228 4813 generic.go:334] "Generic (PLEG): container finished" podID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerID="af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca" exitCode=0 Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.730258 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerDied","Data":"f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42"} Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.730297 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerDied","Data":"49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd"} Mar 20 16:20:48 crc kubenswrapper[4813]: I0320 16:20:48.730309 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerDied","Data":"af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca"} Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.295295 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431507 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-config-data\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431561 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-ceilometer-tls-certs\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431597 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-scripts\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431627 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-log-httpd\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431704 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrbqt\" (UniqueName: \"kubernetes.io/projected/74f77e2d-fc3c-481e-b858-fd507c576f32-kube-api-access-zrbqt\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431758 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-combined-ca-bundle\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431780 4813 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-run-httpd\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.431808 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-sg-core-conf-yaml\") pod \"74f77e2d-fc3c-481e-b858-fd507c576f32\" (UID: \"74f77e2d-fc3c-481e-b858-fd507c576f32\") " Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.433013 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.433190 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.440668 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-scripts" (OuterVolumeSpecName: "scripts") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.456684 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74f77e2d-fc3c-481e-b858-fd507c576f32-kube-api-access-zrbqt" (OuterVolumeSpecName: "kube-api-access-zrbqt") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "kube-api-access-zrbqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.484717 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.493737 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.510392 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.525773 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-config-data" (OuterVolumeSpecName: "config-data") pod "74f77e2d-fc3c-481e-b858-fd507c576f32" (UID: "74f77e2d-fc3c-481e-b858-fd507c576f32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534512 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534546 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534577 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534592 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534604 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534615 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrbqt\" (UniqueName: \"kubernetes.io/projected/74f77e2d-fc3c-481e-b858-fd507c576f32-kube-api-access-zrbqt\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534627 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f77e2d-fc3c-481e-b858-fd507c576f32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.534663 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/74f77e2d-fc3c-481e-b858-fd507c576f32-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.739854 4813 generic.go:334] "Generic (PLEG): container finished" podID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerID="8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc" exitCode=0 Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.739896 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerDied","Data":"8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc"} Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.739928 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"74f77e2d-fc3c-481e-b858-fd507c576f32","Type":"ContainerDied","Data":"cac1ee68f731bee1f0b4b5cf81f5f75f7b6aa7ab3dfa47162cd1865f7aeb80b4"} Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.739930 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.739946 4813 scope.go:117] "RemoveContainer" containerID="f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.758117 4813 scope.go:117] "RemoveContainer" containerID="49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.779662 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.782377 4813 scope.go:117] "RemoveContainer" containerID="8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.794592 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.800829 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.801162 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db6d5669-5b48-48ab-8f34-5cd0ecb91d93" containerName="keystone-api" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801173 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="db6d5669-5b48-48ab-8f34-5cd0ecb91d93" containerName="keystone-api" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.801183 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-notification-agent" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801189 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-notification-agent" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.801200 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-central-agent" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801206 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-central-agent" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.801216 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="sg-core" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801223 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="sg-core" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.801233 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="proxy-httpd" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801239 4813 
state_mem.go:107] "Deleted CPUSet assignment" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="proxy-httpd" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801386 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="proxy-httpd" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.801735 4813 scope.go:117] "RemoveContainer" containerID="af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.802666 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-central-agent" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.802700 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="sg-core" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.802709 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="db6d5669-5b48-48ab-8f34-5cd0ecb91d93" containerName="keystone-api" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.802719 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" containerName="ceilometer-notification-agent" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.804318 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.807099 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.807161 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.807429 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.812925 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.829695 4813 scope.go:117] "RemoveContainer" containerID="f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.830402 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42\": container with ID starting with f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42 not found: ID does not exist" containerID="f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.830452 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42"} err="failed to get container status \"f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42\": rpc error: code = NotFound desc = could not find container \"f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42\": container with ID starting with f2cd6fb30e7e3623ec395bdf8d91197aef34f14eb4395a28b3bf741350953f42 not found: ID does not exist" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.830502 4813 scope.go:117] "RemoveContainer" 
containerID="49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.831068 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd\": container with ID starting with 49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd not found: ID does not exist" containerID="49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.831115 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd"} err="failed to get container status \"49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd\": rpc error: code = NotFound desc = could not find container \"49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd\": container with ID starting with 49fe51b6d1beb6a3cdb4fb226942b586f7939c9fdc498a0fea26e471e21f2fcd not found: ID does not exist" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.831141 4813 scope.go:117] "RemoveContainer" containerID="8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.831671 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc\": container with ID starting with 8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc not found: ID does not exist" containerID="8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.831705 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc"} err="failed to get container status \"8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc\": rpc error: code = NotFound desc = could not find container \"8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc\": container with ID starting with 8a7115fb2289d1c7579cc66e0df03bcae87495fc99cda8bd91e558fb6a0acbfc not found: ID does not exist" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.831726 4813 scope.go:117] "RemoveContainer" containerID="af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca" Mar 20 16:20:49 crc kubenswrapper[4813]: E0320 16:20:49.832001 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca\": container with ID starting with af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca not found: ID does not exist" containerID="af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.832029 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca"} err="failed to get container status \"af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca\": rpc error: code = NotFound desc = could not find container \"af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca\": container with ID starting with 
af80506e1da001fafa4fb7c3888aaf2b9eeeedc2ec46c660b5e4c3bdd9d628ca not found: ID does not exist" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941475 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-config-data\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941621 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-scripts\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941678 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-log-httpd\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941706 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-run-httpd\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941733 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb4nw\" (UniqueName: \"kubernetes.io/projected/50b3233f-bb65-45b6-90a2-4a06b426af7b-kube-api-access-lb4nw\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941784 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:49 crc kubenswrapper[4813]: I0320 16:20:49.941813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043109 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-log-httpd\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 
16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043163 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-run-httpd\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043199 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb4nw\" (UniqueName: \"kubernetes.io/projected/50b3233f-bb65-45b6-90a2-4a06b426af7b-kube-api-access-lb4nw\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043252 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043282 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043318 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-config-data\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043343 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-scripts\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043358 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043675 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-log-httpd\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.043809 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-run-httpd\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.047989 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-scripts\") pod \"ceilometer-0\" (UID: 
\"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.048044 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.048107 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.048392 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.048794 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-config-data\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.060170 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb4nw\" (UniqueName: \"kubernetes.io/projected/50b3233f-bb65-45b6-90a2-4a06b426af7b-kube-api-access-lb4nw\") pod \"ceilometer-0\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.128592 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.576800 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:20:50 crc kubenswrapper[4813]: W0320 16:20:50.584717 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50b3233f_bb65_45b6_90a2_4a06b426af7b.slice/crio-740bac86a35b174a635f7b1f4080e5443c54023ecc54c823b746c68e9b1515aa WatchSource:0}: Error finding container 740bac86a35b174a635f7b1f4080e5443c54023ecc54c823b746c68e9b1515aa: Status 404 returned error can't find the container with id 740bac86a35b174a635f7b1f4080e5443c54023ecc54c823b746c68e9b1515aa Mar 20 16:20:50 crc kubenswrapper[4813]: I0320 16:20:50.764008 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerStarted","Data":"740bac86a35b174a635f7b1f4080e5443c54023ecc54c823b746c68e9b1515aa"} Mar 20 16:20:51 crc kubenswrapper[4813]: I0320 16:20:51.274900 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74f77e2d-fc3c-481e-b858-fd507c576f32" path="/var/lib/kubelet/pods/74f77e2d-fc3c-481e-b858-fd507c576f32/volumes" Mar 20 16:20:51 crc kubenswrapper[4813]: I0320 16:20:51.781079 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerStarted","Data":"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d"} Mar 20 16:20:51 crc kubenswrapper[4813]: I0320 16:20:51.918395 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:51 crc kubenswrapper[4813]: I0320 16:20:51.918461 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:51 crc kubenswrapper[4813]: I0320 16:20:51.966204 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:52 crc kubenswrapper[4813]: I0320 16:20:52.266222 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:20:52 crc kubenswrapper[4813]: E0320 16:20:52.266611 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:20:52 crc kubenswrapper[4813]: I0320 16:20:52.790388 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerStarted","Data":"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039"} Mar 20 16:20:52 crc kubenswrapper[4813]: I0320 16:20:52.835288 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:53 crc kubenswrapper[4813]: I0320 16:20:53.801209 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerStarted","Data":"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1"} Mar 20 16:20:55 crc kubenswrapper[4813]: I0320 16:20:55.568549 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2ztqb"] Mar 20 16:20:55 crc kubenswrapper[4813]: I0320 16:20:55.569044 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2ztqb" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="registry-server" containerID="cri-o://a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795" gracePeriod=2 Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.560279 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.663922 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-utilities\") pod \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.664034 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gx5z\" (UniqueName: \"kubernetes.io/projected/ae06b4b9-c759-4118-b9c1-a03dce5c759b-kube-api-access-4gx5z\") pod \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.664162 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-catalog-content\") pod \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\" (UID: \"ae06b4b9-c759-4118-b9c1-a03dce5c759b\") " Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.664870 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-utilities" (OuterVolumeSpecName: "utilities") pod "ae06b4b9-c759-4118-b9c1-a03dce5c759b" (UID: "ae06b4b9-c759-4118-b9c1-a03dce5c759b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.668224 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae06b4b9-c759-4118-b9c1-a03dce5c759b-kube-api-access-4gx5z" (OuterVolumeSpecName: "kube-api-access-4gx5z") pod "ae06b4b9-c759-4118-b9c1-a03dce5c759b" (UID: "ae06b4b9-c759-4118-b9c1-a03dce5c759b"). InnerVolumeSpecName "kube-api-access-4gx5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.710697 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae06b4b9-c759-4118-b9c1-a03dce5c759b" (UID: "ae06b4b9-c759-4118-b9c1-a03dce5c759b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.766135 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gx5z\" (UniqueName: \"kubernetes.io/projected/ae06b4b9-c759-4118-b9c1-a03dce5c759b-kube-api-access-4gx5z\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.766180 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.766192 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae06b4b9-c759-4118-b9c1-a03dce5c759b-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.824469 4813 generic.go:334] "Generic (PLEG): container finished" podID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerID="a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795" exitCode=0 Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.824539 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ztqb" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.824534 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerDied","Data":"a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795"} Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.824703 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ztqb" event={"ID":"ae06b4b9-c759-4118-b9c1-a03dce5c759b","Type":"ContainerDied","Data":"f2386d6adfecde83a6ad3f19c3f0b52ce181f2577a28dcd55c7a09abac7946bf"} Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.824736 4813 scope.go:117] "RemoveContainer" containerID="a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.827648 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerStarted","Data":"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f"} Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.827774 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.844881 4813 scope.go:117] "RemoveContainer" containerID="2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.859537 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.843415728 podStartE2EDuration="7.859519032s" podCreationTimestamp="2026-03-20 16:20:49 +0000 UTC" firstStartedPulling="2026-03-20 16:20:50.587013886 +0000 UTC m=+2580.009716727" lastFinishedPulling="2026-03-20 16:20:55.6031172 +0000 UTC m=+2585.025820031" observedRunningTime="2026-03-20 16:20:56.855221676 +0000 UTC m=+2586.277924557" watchObservedRunningTime="2026-03-20 16:20:56.859519032 +0000 UTC m=+2586.282221873" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.865312 4813 scope.go:117] "RemoveContainer" 
containerID="627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.879669 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2ztqb"] Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.885433 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2ztqb"] Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.898437 4813 scope.go:117] "RemoveContainer" containerID="a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795" Mar 20 16:20:56 crc kubenswrapper[4813]: E0320 16:20:56.899121 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795\": container with ID starting with a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795 not found: ID does not exist" containerID="a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.899167 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795"} err="failed to get container status \"a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795\": rpc error: code = NotFound desc = could not find container \"a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795\": container with ID starting with a100613aeee67c106e2ffb169aa986918f6d1c9cc65050978462ce7bfdc0b795 not found: ID does not exist" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.899197 4813 scope.go:117] "RemoveContainer" containerID="2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0" Mar 20 16:20:56 crc kubenswrapper[4813]: E0320 16:20:56.899604 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0\": container with ID starting with 2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0 not found: ID does not exist" containerID="2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.899634 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0"} err="failed to get container status \"2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0\": rpc error: code = NotFound desc = could not find container \"2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0\": container with ID starting with 2e64736f505f3ede51a178dace208da56a67cec9f5cc7ff75e28e11bf66c13f0 not found: ID does not exist" Mar 20 16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.899656 4813 scope.go:117] "RemoveContainer" containerID="627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93" Mar 20 16:20:56 crc kubenswrapper[4813]: E0320 16:20:56.899918 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93\": container with ID starting with 627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93 not found: ID does not exist" containerID="627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93" Mar 20 
16:20:56 crc kubenswrapper[4813]: I0320 16:20:56.899946 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93"} err="failed to get container status \"627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93\": rpc error: code = NotFound desc = could not find container \"627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93\": container with ID starting with 627b89778dd8b08649ec31cb8c92f6a9e26ade34d2e2171ee2ee70b26f3bdc93 not found: ID does not exist" Mar 20 16:20:57 crc kubenswrapper[4813]: I0320 16:20:57.274542 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" path="/var/lib/kubelet/pods/ae06b4b9-c759-4118-b9c1-a03dce5c759b/volumes" Mar 20 16:21:06 crc kubenswrapper[4813]: I0320 16:21:06.266032 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:21:06 crc kubenswrapper[4813]: E0320 16:21:06.266970 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:21:19 crc kubenswrapper[4813]: I0320 16:21:19.266504 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:21:19 crc kubenswrapper[4813]: E0320 16:21:19.267528 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.136612 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.776008 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw"] Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.784155 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-pcmhw"] Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.845669 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher92df-account-delete-tccz5"] Mar 20 16:21:20 crc kubenswrapper[4813]: E0320 16:21:20.846148 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="extract-content" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.846171 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="extract-content" Mar 20 16:21:20 crc kubenswrapper[4813]: E0320 16:21:20.846220 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="registry-server" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.846229 4813 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="registry-server" Mar 20 16:21:20 crc kubenswrapper[4813]: E0320 16:21:20.846245 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="extract-utilities" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.846255 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="extract-utilities" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.846426 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae06b4b9-c759-4118-b9c1-a03dce5c759b" containerName="registry-server" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.847201 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.853078 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.853295 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="c092d35a-2592-45c5-a196-17f993b66c7e" containerName="watcher-decision-engine" containerID="cri-o://4332d088397ffa5bef8f4c0fd01832136c2d7e36f2cc2070dde5f1c4b95e0d74" gracePeriod=30 Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.867985 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher92df-account-delete-tccz5"] Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.927418 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.927759 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-kuttl-api-log" containerID="cri-o://26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba" gracePeriod=30 Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.929164 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-api" containerID="cri-o://2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07" gracePeriod=30 Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.952104 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-operator-scripts\") pod \"watcher92df-account-delete-tccz5\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.952159 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8686p\" (UniqueName: \"kubernetes.io/projected/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-kube-api-access-8686p\") pod \"watcher92df-account-delete-tccz5\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.998052 4813 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:20 crc kubenswrapper[4813]: I0320 16:21:20.998259 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="00d24df6-eafd-481a-8380-7e4b949304fc" containerName="watcher-applier" containerID="cri-o://b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60" gracePeriod=30 Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.054795 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-operator-scripts\") pod \"watcher92df-account-delete-tccz5\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.054882 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8686p\" (UniqueName: \"kubernetes.io/projected/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-kube-api-access-8686p\") pod \"watcher92df-account-delete-tccz5\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.055664 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-operator-scripts\") pod \"watcher92df-account-delete-tccz5\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.077377 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8686p\" (UniqueName: \"kubernetes.io/projected/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-kube-api-access-8686p\") pod \"watcher92df-account-delete-tccz5\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.167892 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.276946 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd5cfafd-67cb-47e4-a9b0-739d95b77a6b" path="/var/lib/kubelet/pods/bd5cfafd-67cb-47e4-a9b0-739d95b77a6b/volumes" Mar 20 16:21:21 crc kubenswrapper[4813]: I0320 16:21:21.720461 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher92df-account-delete-tccz5"] Mar 20 16:21:21 crc kubenswrapper[4813]: W0320 16:21:21.726222 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f3a2fc2_a32a_4ebe_953d_f3a1a76a014f.slice/crio-ddd46f3a494f6f31e86d4bb318c20c89cd76c794df9d5bcb3c2b8eab3ae1b75a WatchSource:0}: Error finding container ddd46f3a494f6f31e86d4bb318c20c89cd76c794df9d5bcb3c2b8eab3ae1b75a: Status 404 returned error can't find the container with id ddd46f3a494f6f31e86d4bb318c20c89cd76c794df9d5bcb3c2b8eab3ae1b75a Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.058220 4813 generic.go:334] "Generic (PLEG): container finished" podID="c092d35a-2592-45c5-a196-17f993b66c7e" containerID="4332d088397ffa5bef8f4c0fd01832136c2d7e36f2cc2070dde5f1c4b95e0d74" exitCode=0 Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.058513 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"c092d35a-2592-45c5-a196-17f993b66c7e","Type":"ContainerDied","Data":"4332d088397ffa5bef8f4c0fd01832136c2d7e36f2cc2070dde5f1c4b95e0d74"} Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.059991 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" event={"ID":"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f","Type":"ContainerStarted","Data":"4e5aaf8a3ff11a246a2864b20214d0fdb566ee007b31116e096ff536cfb8c72a"} Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.060043 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" event={"ID":"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f","Type":"ContainerStarted","Data":"ddd46f3a494f6f31e86d4bb318c20c89cd76c794df9d5bcb3c2b8eab3ae1b75a"} Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.062895 4813 generic.go:334] "Generic (PLEG): container finished" podID="660b0cb7-523e-488a-b267-43331525e435" containerID="26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba" exitCode=143 Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.062968 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"660b0cb7-523e-488a-b267-43331525e435","Type":"ContainerDied","Data":"26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba"} Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.085331 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" podStartSLOduration=2.085310139 podStartE2EDuration="2.085310139s" podCreationTimestamp="2026-03-20 16:21:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:21:22.078598018 +0000 UTC m=+2611.501300859" watchObservedRunningTime="2026-03-20 16:21:22.085310139 +0000 UTC m=+2611.508012980" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.292674 4813 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.388543 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-cert-memcached-mtls\") pod \"c092d35a-2592-45c5-a196-17f993b66c7e\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.482232 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "c092d35a-2592-45c5-a196-17f993b66c7e" (UID: "c092d35a-2592-45c5-a196-17f993b66c7e"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.491786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c092d35a-2592-45c5-a196-17f993b66c7e-logs\") pod \"c092d35a-2592-45c5-a196-17f993b66c7e\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.491842 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-config-data\") pod \"c092d35a-2592-45c5-a196-17f993b66c7e\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.491880 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-combined-ca-bundle\") pod \"c092d35a-2592-45c5-a196-17f993b66c7e\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.491907 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md2bf\" (UniqueName: \"kubernetes.io/projected/c092d35a-2592-45c5-a196-17f993b66c7e-kube-api-access-md2bf\") pod \"c092d35a-2592-45c5-a196-17f993b66c7e\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.491939 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-custom-prometheus-ca\") pod \"c092d35a-2592-45c5-a196-17f993b66c7e\" (UID: \"c092d35a-2592-45c5-a196-17f993b66c7e\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.492321 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.492827 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c092d35a-2592-45c5-a196-17f993b66c7e-logs" (OuterVolumeSpecName: "logs") pod "c092d35a-2592-45c5-a196-17f993b66c7e" (UID: "c092d35a-2592-45c5-a196-17f993b66c7e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.498292 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c092d35a-2592-45c5-a196-17f993b66c7e-kube-api-access-md2bf" (OuterVolumeSpecName: "kube-api-access-md2bf") pod "c092d35a-2592-45c5-a196-17f993b66c7e" (UID: "c092d35a-2592-45c5-a196-17f993b66c7e"). InnerVolumeSpecName "kube-api-access-md2bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.514736 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c092d35a-2592-45c5-a196-17f993b66c7e" (UID: "c092d35a-2592-45c5-a196-17f993b66c7e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.550238 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "c092d35a-2592-45c5-a196-17f993b66c7e" (UID: "c092d35a-2592-45c5-a196-17f993b66c7e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.580225 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.582029 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-config-data" (OuterVolumeSpecName: "config-data") pod "c092d35a-2592-45c5-a196-17f993b66c7e" (UID: "c092d35a-2592-45c5-a196-17f993b66c7e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.592786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-custom-prometheus-ca\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.592847 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6kzr\" (UniqueName: \"kubernetes.io/projected/660b0cb7-523e-488a-b267-43331525e435-kube-api-access-j6kzr\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.592898 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.592935 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-config-data\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.592954 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-combined-ca-bundle\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.592980 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660b0cb7-523e-488a-b267-43331525e435-logs\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.593266 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c092d35a-2592-45c5-a196-17f993b66c7e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.593282 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.593293 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.593305 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md2bf\" (UniqueName: \"kubernetes.io/projected/c092d35a-2592-45c5-a196-17f993b66c7e-kube-api-access-md2bf\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.593316 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/c092d35a-2592-45c5-a196-17f993b66c7e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc 
kubenswrapper[4813]: I0320 16:21:22.593691 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/660b0cb7-523e-488a-b267-43331525e435-logs" (OuterVolumeSpecName: "logs") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.596280 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/660b0cb7-523e-488a-b267-43331525e435-kube-api-access-j6kzr" (OuterVolumeSpecName: "kube-api-access-j6kzr") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "kube-api-access-j6kzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.628038 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.662901 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.676660 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-config-data" (OuterVolumeSpecName: "config-data") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.693748 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694152 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls\") pod \"660b0cb7-523e-488a-b267-43331525e435\" (UID: \"660b0cb7-523e-488a-b267-43331525e435\") " Mar 20 16:21:22 crc kubenswrapper[4813]: W0320 16:21:22.694289 4813 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/660b0cb7-523e-488a-b267-43331525e435/volumes/kubernetes.io~secret/cert-memcached-mtls Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694307 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "660b0cb7-523e-488a-b267-43331525e435" (UID: "660b0cb7-523e-488a-b267-43331525e435"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694508 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694526 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694535 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694544 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660b0cb7-523e-488a-b267-43331525e435-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694554 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/660b0cb7-523e-488a-b267-43331525e435-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:22 crc kubenswrapper[4813]: I0320 16:21:22.694563 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6kzr\" (UniqueName: \"kubernetes.io/projected/660b0cb7-523e-488a-b267-43331525e435-kube-api-access-j6kzr\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.072089 4813 generic.go:334] "Generic (PLEG): container finished" podID="5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" containerID="4e5aaf8a3ff11a246a2864b20214d0fdb566ee007b31116e096ff536cfb8c72a" exitCode=0 Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.072446 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" event={"ID":"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f","Type":"ContainerDied","Data":"4e5aaf8a3ff11a246a2864b20214d0fdb566ee007b31116e096ff536cfb8c72a"} Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.074568 4813 generic.go:334] "Generic (PLEG): container finished" podID="660b0cb7-523e-488a-b267-43331525e435" 
containerID="2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07" exitCode=0 Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.074684 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"660b0cb7-523e-488a-b267-43331525e435","Type":"ContainerDied","Data":"2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07"} Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.074736 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"660b0cb7-523e-488a-b267-43331525e435","Type":"ContainerDied","Data":"16563f7aeadd4a3de67c54eda39c7a36971a18987947d4c5fab89274f885c887"} Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.074768 4813 scope.go:117] "RemoveContainer" containerID="2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.074894 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.078969 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"c092d35a-2592-45c5-a196-17f993b66c7e","Type":"ContainerDied","Data":"4477d49990e8b7abb681b2b04afcdbbfb53f67cafd0c4b8ab7e57a08036513d5"} Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.079110 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.218295 4813 scope.go:117] "RemoveContainer" containerID="26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.234293 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.241940 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.251862 4813 scope.go:117] "RemoveContainer" containerID="2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.251980 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:23 crc kubenswrapper[4813]: E0320 16:21:23.252359 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07\": container with ID starting with 2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07 not found: ID does not exist" containerID="2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.252399 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07"} err="failed to get container status \"2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07\": rpc error: code = NotFound desc = could not find container \"2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07\": container with ID starting with 2ea710d93bd7488a22fdc15e165361299dfeeb9315e6f2fe26e4294cfb0f8a07 not found: ID does not exist" 
Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.252425 4813 scope.go:117] "RemoveContainer" containerID="26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba" Mar 20 16:21:23 crc kubenswrapper[4813]: E0320 16:21:23.252883 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba\": container with ID starting with 26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba not found: ID does not exist" containerID="26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.252930 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba"} err="failed to get container status \"26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba\": rpc error: code = NotFound desc = could not find container \"26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba\": container with ID starting with 26c541ee1fcbbf50c121938456e3ef05b2776d669496d3138b678c214a5503ba not found: ID does not exist" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.252971 4813 scope.go:117] "RemoveContainer" containerID="4332d088397ffa5bef8f4c0fd01832136c2d7e36f2cc2070dde5f1c4b95e0d74" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.259373 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.282247 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="660b0cb7-523e-488a-b267-43331525e435" path="/var/lib/kubelet/pods/660b0cb7-523e-488a-b267-43331525e435/volumes" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.282973 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c092d35a-2592-45c5-a196-17f993b66c7e" path="/var/lib/kubelet/pods/c092d35a-2592-45c5-a196-17f993b66c7e/volumes" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.479825 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.516885 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-combined-ca-bundle\") pod \"00d24df6-eafd-481a-8380-7e4b949304fc\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.516938 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-config-data\") pod \"00d24df6-eafd-481a-8380-7e4b949304fc\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.517008 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d24df6-eafd-481a-8380-7e4b949304fc-logs\") pod \"00d24df6-eafd-481a-8380-7e4b949304fc\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.517034 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-cert-memcached-mtls\") pod \"00d24df6-eafd-481a-8380-7e4b949304fc\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.517056 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr2tc\" (UniqueName: \"kubernetes.io/projected/00d24df6-eafd-481a-8380-7e4b949304fc-kube-api-access-vr2tc\") pod \"00d24df6-eafd-481a-8380-7e4b949304fc\" (UID: \"00d24df6-eafd-481a-8380-7e4b949304fc\") " Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.517977 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00d24df6-eafd-481a-8380-7e4b949304fc-logs" (OuterVolumeSpecName: "logs") pod "00d24df6-eafd-481a-8380-7e4b949304fc" (UID: "00d24df6-eafd-481a-8380-7e4b949304fc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.521429 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00d24df6-eafd-481a-8380-7e4b949304fc-kube-api-access-vr2tc" (OuterVolumeSpecName: "kube-api-access-vr2tc") pod "00d24df6-eafd-481a-8380-7e4b949304fc" (UID: "00d24df6-eafd-481a-8380-7e4b949304fc"). InnerVolumeSpecName "kube-api-access-vr2tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.558769 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00d24df6-eafd-481a-8380-7e4b949304fc" (UID: "00d24df6-eafd-481a-8380-7e4b949304fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.581752 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-config-data" (OuterVolumeSpecName: "config-data") pod "00d24df6-eafd-481a-8380-7e4b949304fc" (UID: "00d24df6-eafd-481a-8380-7e4b949304fc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.598671 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "00d24df6-eafd-481a-8380-7e4b949304fc" (UID: "00d24df6-eafd-481a-8380-7e4b949304fc"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.618846 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.619068 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.619157 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00d24df6-eafd-481a-8380-7e4b949304fc-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.619221 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/00d24df6-eafd-481a-8380-7e4b949304fc-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:23 crc kubenswrapper[4813]: I0320 16:21:23.619283 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr2tc\" (UniqueName: \"kubernetes.io/projected/00d24df6-eafd-481a-8380-7e4b949304fc-kube-api-access-vr2tc\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.060993 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.061720 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-central-agent" containerID="cri-o://6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" gracePeriod=30 Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.061818 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-notification-agent" containerID="cri-o://9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" gracePeriod=30 Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.061818 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="sg-core" containerID="cri-o://4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" gracePeriod=30 Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.062112 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="proxy-httpd" containerID="cri-o://33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" gracePeriod=30 Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.091927 4813 generic.go:334] "Generic (PLEG): 
container finished" podID="00d24df6-eafd-481a-8380-7e4b949304fc" containerID="b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60" exitCode=0 Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.092140 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.092926 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"00d24df6-eafd-481a-8380-7e4b949304fc","Type":"ContainerDied","Data":"b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60"} Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.092972 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"00d24df6-eafd-481a-8380-7e4b949304fc","Type":"ContainerDied","Data":"44119ffcb27963fdf2c136ff180edbe9d5d858e3f34d9c9532381063340c6252"} Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.092992 4813 scope.go:117] "RemoveContainer" containerID="b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.128060 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.139215 4813 scope.go:117] "RemoveContainer" containerID="b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.139913 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:24 crc kubenswrapper[4813]: E0320 16:21:24.140045 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60\": container with ID starting with b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60 not found: ID does not exist" containerID="b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.140137 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60"} err="failed to get container status \"b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60\": rpc error: code = NotFound desc = could not find container \"b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60\": container with ID starting with b74a96a15e9b90034734c146904fd0f885ea9d164718c53daea21636f1bbba60 not found: ID does not exist" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.546007 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.738044 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8686p\" (UniqueName: \"kubernetes.io/projected/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-kube-api-access-8686p\") pod \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.738134 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-operator-scripts\") pod \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\" (UID: \"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f\") " Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.739354 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" (UID: "5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.745058 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-kube-api-access-8686p" (OuterVolumeSpecName: "kube-api-access-8686p") pod "5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" (UID: "5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f"). InnerVolumeSpecName "kube-api-access-8686p". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.841910 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8686p\" (UniqueName: \"kubernetes.io/projected/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-kube-api-access-8686p\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.841953 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:24 crc kubenswrapper[4813]: I0320 16:21:24.962671 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.101158 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.101149 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher92df-account-delete-tccz5" event={"ID":"5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f","Type":"ContainerDied","Data":"ddd46f3a494f6f31e86d4bb318c20c89cd76c794df9d5bcb3c2b8eab3ae1b75a"} Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.101341 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddd46f3a494f6f31e86d4bb318c20c89cd76c794df9d5bcb3c2b8eab3ae1b75a" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103371 4813 generic.go:334] "Generic (PLEG): container finished" podID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" exitCode=0 Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103396 4813 generic.go:334] "Generic (PLEG): container finished" podID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" exitCode=2 Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103408 4813 generic.go:334] "Generic (PLEG): container finished" podID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" exitCode=0 Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103419 4813 generic.go:334] "Generic (PLEG): container finished" podID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" exitCode=0 Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103435 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerDied","Data":"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f"} Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerDied","Data":"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1"} Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103466 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerDied","Data":"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039"} Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103511 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerDied","Data":"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d"} Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103526 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50b3233f-bb65-45b6-90a2-4a06b426af7b","Type":"ContainerDied","Data":"740bac86a35b174a635f7b1f4080e5443c54023ecc54c823b746c68e9b1515aa"} Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103544 4813 scope.go:117] "RemoveContainer" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.103642 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.131903 4813 scope.go:117] "RemoveContainer" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.146827 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-sg-core-conf-yaml\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147108 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-combined-ca-bundle\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147149 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-ceilometer-tls-certs\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147197 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-run-httpd\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147235 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-scripts\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147262 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-log-httpd\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147620 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147654 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb4nw\" (UniqueName: \"kubernetes.io/projected/50b3233f-bb65-45b6-90a2-4a06b426af7b-kube-api-access-lb4nw\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.147691 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-config-data\") pod \"50b3233f-bb65-45b6-90a2-4a06b426af7b\" (UID: \"50b3233f-bb65-45b6-90a2-4a06b426af7b\") " Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.148075 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.148693 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.151653 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b3233f-bb65-45b6-90a2-4a06b426af7b-kube-api-access-lb4nw" (OuterVolumeSpecName: "kube-api-access-lb4nw") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "kube-api-access-lb4nw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.157377 4813 scope.go:117] "RemoveContainer" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.168611 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-scripts" (OuterVolumeSpecName: "scripts") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.171064 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.203591 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.209620 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.239110 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-config-data" (OuterVolumeSpecName: "config-data") pod "50b3233f-bb65-45b6-90a2-4a06b426af7b" (UID: "50b3233f-bb65-45b6-90a2-4a06b426af7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249685 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb4nw\" (UniqueName: \"kubernetes.io/projected/50b3233f-bb65-45b6-90a2-4a06b426af7b-kube-api-access-lb4nw\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249743 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249758 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249770 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249782 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249795 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50b3233f-bb65-45b6-90a2-4a06b426af7b-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.249830 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50b3233f-bb65-45b6-90a2-4a06b426af7b-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.282192 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00d24df6-eafd-481a-8380-7e4b949304fc" path="/var/lib/kubelet/pods/00d24df6-eafd-481a-8380-7e4b949304fc/volumes" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.307592 4813 scope.go:117] "RemoveContainer" containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.327742 4813 scope.go:117] "RemoveContainer" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.328274 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": container with ID starting with 33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f not found: ID does not exist" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.328311 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f"} err="failed to get container status \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": rpc error: code = NotFound desc = could not find container \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": container with ID starting with 33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.328330 4813 scope.go:117] "RemoveContainer" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.328914 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": container with ID starting with 4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1 not found: ID does not exist" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.328957 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1"} err="failed to get container status \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": rpc error: code = NotFound desc = could not find container \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": container with ID starting with 4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.329013 4813 scope.go:117] "RemoveContainer" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.329394 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": container with ID starting with 9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039 not found: ID does not exist" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.329418 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039"} err="failed to get container status \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": rpc error: code = NotFound desc = could not find container \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": container with ID starting with 9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.329434 4813 scope.go:117] "RemoveContainer" 
containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.329740 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": container with ID starting with 6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d not found: ID does not exist" containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.329763 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d"} err="failed to get container status \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": rpc error: code = NotFound desc = could not find container \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": container with ID starting with 6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.329777 4813 scope.go:117] "RemoveContainer" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330008 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f"} err="failed to get container status \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": rpc error: code = NotFound desc = could not find container \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": container with ID starting with 33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330028 4813 scope.go:117] "RemoveContainer" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330345 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1"} err="failed to get container status \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": rpc error: code = NotFound desc = could not find container \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": container with ID starting with 4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330369 4813 scope.go:117] "RemoveContainer" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330647 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039"} err="failed to get container status \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": rpc error: code = NotFound desc = could not find container \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": container with ID starting with 9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330670 4813 scope.go:117] "RemoveContainer" 
containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330898 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d"} err="failed to get container status \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": rpc error: code = NotFound desc = could not find container \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": container with ID starting with 6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.330917 4813 scope.go:117] "RemoveContainer" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.331128 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f"} err="failed to get container status \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": rpc error: code = NotFound desc = could not find container \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": container with ID starting with 33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.331147 4813 scope.go:117] "RemoveContainer" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.331345 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1"} err="failed to get container status \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": rpc error: code = NotFound desc = could not find container \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": container with ID starting with 4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.331365 4813 scope.go:117] "RemoveContainer" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.331634 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039"} err="failed to get container status \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": rpc error: code = NotFound desc = could not find container \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": container with ID starting with 9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.331654 4813 scope.go:117] "RemoveContainer" containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.333231 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d"} err="failed to get container status \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": rpc error: code = NotFound desc = could not find 
container \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": container with ID starting with 6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.333256 4813 scope.go:117] "RemoveContainer" containerID="33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.333542 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f"} err="failed to get container status \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": rpc error: code = NotFound desc = could not find container \"33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f\": container with ID starting with 33b46fcf13a961f82e5a4e2aa0a12301bbedaa55fa30744b85d3fd2ce9830a8f not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.333566 4813 scope.go:117] "RemoveContainer" containerID="4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.337772 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1"} err="failed to get container status \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": rpc error: code = NotFound desc = could not find container \"4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1\": container with ID starting with 4c0cf7772eb61113f3e006a24b064d9971bc1ce6c272ca0bf9d1e222040e92a1 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.337795 4813 scope.go:117] "RemoveContainer" containerID="9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.338750 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039"} err="failed to get container status \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": rpc error: code = NotFound desc = could not find container \"9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039\": container with ID starting with 9a05c7667004560f4a9ce216408fb594a701a19f9d2d6c719a81fa5f563b3039 not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.338771 4813 scope.go:117] "RemoveContainer" containerID="6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.341663 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d"} err="failed to get container status \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": rpc error: code = NotFound desc = could not find container \"6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d\": container with ID starting with 6d0f5759d74295dc4761d52fc08720867addce3241f1346c901a151399cbec0d not found: ID does not exist" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.431783 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.439345 4813 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.459961 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460321 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-notification-agent" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460344 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-notification-agent" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460359 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="proxy-httpd" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460365 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="proxy-httpd" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460382 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-kuttl-api-log" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460388 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-kuttl-api-log" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460399 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-central-agent" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460405 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-central-agent" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460415 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00d24df6-eafd-481a-8380-7e4b949304fc" containerName="watcher-applier" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460421 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="00d24df6-eafd-481a-8380-7e4b949304fc" containerName="watcher-applier" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460433 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-api" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460438 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-api" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460448 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c092d35a-2592-45c5-a196-17f993b66c7e" containerName="watcher-decision-engine" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460454 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c092d35a-2592-45c5-a196-17f993b66c7e" containerName="watcher-decision-engine" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460465 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="sg-core" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460470 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="sg-core" Mar 20 16:21:25 crc kubenswrapper[4813]: E0320 16:21:25.460501 4813 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" containerName="mariadb-account-delete" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460507 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" containerName="mariadb-account-delete" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460667 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-api" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460685 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-central-agent" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460696 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="proxy-httpd" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460705 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="00d24df6-eafd-481a-8380-7e4b949304fc" containerName="watcher-applier" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460713 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c092d35a-2592-45c5-a196-17f993b66c7e" containerName="watcher-decision-engine" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460725 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="660b0cb7-523e-488a-b267-43331525e435" containerName="watcher-kuttl-api-log" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460733 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="sg-core" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460741 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" containerName="ceilometer-notification-agent" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.460752 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" containerName="mariadb-account-delete" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.464546 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.466948 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.467229 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.468362 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.472388 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658436 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-run-httpd\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658529 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-log-httpd\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658555 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-scripts\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658577 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658592 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658609 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfqnt\" (UniqueName: \"kubernetes.io/projected/c9f704e1-7801-45e1-a3ff-d9318219687a-kube-api-access-gfqnt\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658730 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-config-data\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.658759 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760079 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-run-httpd\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760157 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-log-httpd\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760182 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-scripts\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760205 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760226 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760242 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfqnt\" (UniqueName: \"kubernetes.io/projected/c9f704e1-7801-45e1-a3ff-d9318219687a-kube-api-access-gfqnt\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760268 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-config-data\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760284 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.760816 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-run-httpd\") pod \"ceilometer-0\" (UID: 
\"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.761042 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-log-httpd\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.763983 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.764335 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.765388 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.765710 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-config-data\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.767058 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-scripts\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.783178 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfqnt\" (UniqueName: \"kubernetes.io/projected/c9f704e1-7801-45e1-a3ff-d9318219687a-kube-api-access-gfqnt\") pod \"ceilometer-0\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.867033 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-lk472"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.883511 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-lk472"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.889527 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher92df-account-delete-tccz5"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.896306 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-92df-account-create-update-7tw4r"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 16:21:25.902031 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher92df-account-delete-tccz5"] Mar 20 16:21:25 crc kubenswrapper[4813]: I0320 
16:21:25.907460 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-92df-account-create-update-7tw4r"] Mar 20 16:21:26 crc kubenswrapper[4813]: I0320 16:21:26.082931 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:26 crc kubenswrapper[4813]: I0320 16:21:26.575361 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:26 crc kubenswrapper[4813]: I0320 16:21:26.580625 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.123301 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerStarted","Data":"bcb1f5bb9c692ebb2b1ddb924e3d800b8356142aca0bbbbb5d58cd4d1edeb04e"} Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.278181 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19c8777f-603b-48df-b375-555b07680c55" path="/var/lib/kubelet/pods/19c8777f-603b-48df-b375-555b07680c55/volumes" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.278897 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="230e4fdf-16a8-426a-8b94-906c516a84b5" path="/var/lib/kubelet/pods/230e4fdf-16a8-426a-8b94-906c516a84b5/volumes" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.279628 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50b3233f-bb65-45b6-90a2-4a06b426af7b" path="/var/lib/kubelet/pods/50b3233f-bb65-45b6-90a2-4a06b426af7b/volumes" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.281195 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f" path="/var/lib/kubelet/pods/5f3a2fc2-a32a-4ebe-953d-f3a1a76a014f/volumes" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.442232 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-m9s9w"] Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.443773 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.448639 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-9cc2-account-create-update-929sf"] Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.449570 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.452332 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.458037 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-m9s9w"] Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.479786 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-9cc2-account-create-update-929sf"] Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.599013 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfqvf\" (UniqueName: \"kubernetes.io/projected/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-kube-api-access-kfqvf\") pod \"watcher-db-create-m9s9w\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.599089 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx7ls\" (UniqueName: \"kubernetes.io/projected/bc6d142f-eb92-4213-93b8-d10527328b9b-kube-api-access-lx7ls\") pod \"watcher-9cc2-account-create-update-929sf\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.599593 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc6d142f-eb92-4213-93b8-d10527328b9b-operator-scripts\") pod \"watcher-9cc2-account-create-update-929sf\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.599654 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-operator-scripts\") pod \"watcher-db-create-m9s9w\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.700917 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc6d142f-eb92-4213-93b8-d10527328b9b-operator-scripts\") pod \"watcher-9cc2-account-create-update-929sf\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.700981 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-operator-scripts\") pod \"watcher-db-create-m9s9w\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.701050 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfqvf\" (UniqueName: \"kubernetes.io/projected/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-kube-api-access-kfqvf\") pod \"watcher-db-create-m9s9w\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " 
pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.701097 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx7ls\" (UniqueName: \"kubernetes.io/projected/bc6d142f-eb92-4213-93b8-d10527328b9b-kube-api-access-lx7ls\") pod \"watcher-9cc2-account-create-update-929sf\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.701666 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-operator-scripts\") pod \"watcher-db-create-m9s9w\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.701666 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc6d142f-eb92-4213-93b8-d10527328b9b-operator-scripts\") pod \"watcher-9cc2-account-create-update-929sf\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.718042 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx7ls\" (UniqueName: \"kubernetes.io/projected/bc6d142f-eb92-4213-93b8-d10527328b9b-kube-api-access-lx7ls\") pod \"watcher-9cc2-account-create-update-929sf\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.728171 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfqvf\" (UniqueName: \"kubernetes.io/projected/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-kube-api-access-kfqvf\") pod \"watcher-db-create-m9s9w\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.761571 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:27 crc kubenswrapper[4813]: I0320 16:21:27.775800 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:28 crc kubenswrapper[4813]: I0320 16:21:28.138872 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerStarted","Data":"8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074"} Mar 20 16:21:28 crc kubenswrapper[4813]: I0320 16:21:28.139124 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerStarted","Data":"1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6"} Mar 20 16:21:28 crc kubenswrapper[4813]: I0320 16:21:28.290379 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-m9s9w"] Mar 20 16:21:28 crc kubenswrapper[4813]: I0320 16:21:28.397438 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-9cc2-account-create-update-929sf"] Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.151831 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerStarted","Data":"6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997"} Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.153880 4813 generic.go:334] "Generic (PLEG): container finished" podID="63a2a35d-aaa9-4e42-8c4d-bb20f4095984" containerID="aea28451ed8059a135210fea4ab34e3fc4bcf158fd3542818a36ed0ee0c6d13a" exitCode=0 Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.153927 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-m9s9w" event={"ID":"63a2a35d-aaa9-4e42-8c4d-bb20f4095984","Type":"ContainerDied","Data":"aea28451ed8059a135210fea4ab34e3fc4bcf158fd3542818a36ed0ee0c6d13a"} Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.153944 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-m9s9w" event={"ID":"63a2a35d-aaa9-4e42-8c4d-bb20f4095984","Type":"ContainerStarted","Data":"055d3ab8cebebbe33e6b0bc90de6bb6498605b36d197c13fa0147e3b385d80c0"} Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.157312 4813 generic.go:334] "Generic (PLEG): container finished" podID="bc6d142f-eb92-4213-93b8-d10527328b9b" containerID="6e78257eb6b5aa83d204d8551cb15c21ca778b5facf51ba09ebc2dfb5bc40eab" exitCode=0 Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.157346 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" event={"ID":"bc6d142f-eb92-4213-93b8-d10527328b9b","Type":"ContainerDied","Data":"6e78257eb6b5aa83d204d8551cb15c21ca778b5facf51ba09ebc2dfb5bc40eab"} Mar 20 16:21:29 crc kubenswrapper[4813]: I0320 16:21:29.157369 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" event={"ID":"bc6d142f-eb92-4213-93b8-d10527328b9b","Type":"ContainerStarted","Data":"abe00f460f4de7346b849d1fa390f399b69c02bac2ac861f8ed03bc5012648d7"} Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.723654 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.730126 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.759825 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfqvf\" (UniqueName: \"kubernetes.io/projected/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-kube-api-access-kfqvf\") pod \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.759870 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx7ls\" (UniqueName: \"kubernetes.io/projected/bc6d142f-eb92-4213-93b8-d10527328b9b-kube-api-access-lx7ls\") pod \"bc6d142f-eb92-4213-93b8-d10527328b9b\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.759890 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc6d142f-eb92-4213-93b8-d10527328b9b-operator-scripts\") pod \"bc6d142f-eb92-4213-93b8-d10527328b9b\" (UID: \"bc6d142f-eb92-4213-93b8-d10527328b9b\") " Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.759911 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-operator-scripts\") pod \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\" (UID: \"63a2a35d-aaa9-4e42-8c4d-bb20f4095984\") " Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.760538 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc6d142f-eb92-4213-93b8-d10527328b9b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bc6d142f-eb92-4213-93b8-d10527328b9b" (UID: "bc6d142f-eb92-4213-93b8-d10527328b9b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.760751 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc6d142f-eb92-4213-93b8-d10527328b9b-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.763677 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc6d142f-eb92-4213-93b8-d10527328b9b-kube-api-access-lx7ls" (OuterVolumeSpecName: "kube-api-access-lx7ls") pod "bc6d142f-eb92-4213-93b8-d10527328b9b" (UID: "bc6d142f-eb92-4213-93b8-d10527328b9b"). InnerVolumeSpecName "kube-api-access-lx7ls". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.763717 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-kube-api-access-kfqvf" (OuterVolumeSpecName: "kube-api-access-kfqvf") pod "63a2a35d-aaa9-4e42-8c4d-bb20f4095984" (UID: "63a2a35d-aaa9-4e42-8c4d-bb20f4095984"). InnerVolumeSpecName "kube-api-access-kfqvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.763788 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63a2a35d-aaa9-4e42-8c4d-bb20f4095984" (UID: "63a2a35d-aaa9-4e42-8c4d-bb20f4095984"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.862161 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfqvf\" (UniqueName: \"kubernetes.io/projected/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-kube-api-access-kfqvf\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.862507 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx7ls\" (UniqueName: \"kubernetes.io/projected/bc6d142f-eb92-4213-93b8-d10527328b9b-kube-api-access-lx7ls\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:30 crc kubenswrapper[4813]: I0320 16:21:30.862517 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63a2a35d-aaa9-4e42-8c4d-bb20f4095984-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.175349 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerStarted","Data":"28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f"} Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.176765 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.179388 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-m9s9w" event={"ID":"63a2a35d-aaa9-4e42-8c4d-bb20f4095984","Type":"ContainerDied","Data":"055d3ab8cebebbe33e6b0bc90de6bb6498605b36d197c13fa0147e3b385d80c0"} Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.179412 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-m9s9w" Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.179423 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="055d3ab8cebebbe33e6b0bc90de6bb6498605b36d197c13fa0147e3b385d80c0" Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.181979 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" event={"ID":"bc6d142f-eb92-4213-93b8-d10527328b9b","Type":"ContainerDied","Data":"abe00f460f4de7346b849d1fa390f399b69c02bac2ac861f8ed03bc5012648d7"} Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.182030 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abe00f460f4de7346b849d1fa390f399b69c02bac2ac861f8ed03bc5012648d7" Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.182071 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-9cc2-account-create-update-929sf" Mar 20 16:21:31 crc kubenswrapper[4813]: I0320 16:21:31.212335 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.278977523 podStartE2EDuration="6.212313598s" podCreationTimestamp="2026-03-20 16:21:25 +0000 UTC" firstStartedPulling="2026-03-20 16:21:26.580328276 +0000 UTC m=+2616.003031127" lastFinishedPulling="2026-03-20 16:21:30.513664351 +0000 UTC m=+2619.936367202" observedRunningTime="2026-03-20 16:21:31.204936739 +0000 UTC m=+2620.627639580" watchObservedRunningTime="2026-03-20 16:21:31.212313598 +0000 UTC m=+2620.635016439" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.085307 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb"] Mar 20 16:21:33 crc kubenswrapper[4813]: E0320 16:21:33.085909 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63a2a35d-aaa9-4e42-8c4d-bb20f4095984" containerName="mariadb-database-create" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.085926 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="63a2a35d-aaa9-4e42-8c4d-bb20f4095984" containerName="mariadb-database-create" Mar 20 16:21:33 crc kubenswrapper[4813]: E0320 16:21:33.085955 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc6d142f-eb92-4213-93b8-d10527328b9b" containerName="mariadb-account-create-update" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.085963 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc6d142f-eb92-4213-93b8-d10527328b9b" containerName="mariadb-account-create-update" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.086160 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc6d142f-eb92-4213-93b8-d10527328b9b" containerName="mariadb-account-create-update" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.086179 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="63a2a35d-aaa9-4e42-8c4d-bb20f4095984" containerName="mariadb-database-create" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.086812 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.089073 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.095072 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-tl4fb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.098718 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb"] Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.221716 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.221815 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gppff\" (UniqueName: \"kubernetes.io/projected/1986a636-70f9-4147-9e78-7ed329bff117-kube-api-access-gppff\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.221874 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-db-sync-config-data\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.221926 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-config-data\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.270768 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:21:33 crc kubenswrapper[4813]: E0320 16:21:33.271152 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.323031 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-db-sync-config-data\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.324267 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-config-data\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.324396 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.325832 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gppff\" (UniqueName: \"kubernetes.io/projected/1986a636-70f9-4147-9e78-7ed329bff117-kube-api-access-gppff\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.328237 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-config-data\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.337204 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.363448 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-db-sync-config-data\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.380152 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gppff\" (UniqueName: \"kubernetes.io/projected/1986a636-70f9-4147-9e78-7ed329bff117-kube-api-access-gppff\") pod \"watcher-kuttl-db-sync-lf5qb\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:33 crc kubenswrapper[4813]: I0320 16:21:33.411849 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:34 crc kubenswrapper[4813]: I0320 16:21:34.080075 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb"] Mar 20 16:21:34 crc kubenswrapper[4813]: I0320 16:21:34.207077 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" event={"ID":"1986a636-70f9-4147-9e78-7ed329bff117","Type":"ContainerStarted","Data":"98a91f82f33defd3f3e85cd38d9a4bed1493a044cd81715d302c97ea6c60f0c0"} Mar 20 16:21:35 crc kubenswrapper[4813]: I0320 16:21:35.214907 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" event={"ID":"1986a636-70f9-4147-9e78-7ed329bff117","Type":"ContainerStarted","Data":"5cb3b5aa8eaa561c164cfc9b67551c982dc1ab06a2ccc54e95503c98ed645824"} Mar 20 16:21:35 crc kubenswrapper[4813]: I0320 16:21:35.237905 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" podStartSLOduration=2.237885672 podStartE2EDuration="2.237885672s" podCreationTimestamp="2026-03-20 16:21:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:21:35.236312339 +0000 UTC m=+2624.659015180" watchObservedRunningTime="2026-03-20 16:21:35.237885672 +0000 UTC m=+2624.660588513" Mar 20 16:21:37 crc kubenswrapper[4813]: I0320 16:21:37.230434 4813 generic.go:334] "Generic (PLEG): container finished" podID="1986a636-70f9-4147-9e78-7ed329bff117" containerID="5cb3b5aa8eaa561c164cfc9b67551c982dc1ab06a2ccc54e95503c98ed645824" exitCode=0 Mar 20 16:21:37 crc kubenswrapper[4813]: I0320 16:21:37.230518 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" event={"ID":"1986a636-70f9-4147-9e78-7ed329bff117","Type":"ContainerDied","Data":"5cb3b5aa8eaa561c164cfc9b67551c982dc1ab06a2ccc54e95503c98ed645824"} Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.607540 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.713738 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-db-sync-config-data\") pod \"1986a636-70f9-4147-9e78-7ed329bff117\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.713842 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-config-data\") pod \"1986a636-70f9-4147-9e78-7ed329bff117\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.713973 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gppff\" (UniqueName: \"kubernetes.io/projected/1986a636-70f9-4147-9e78-7ed329bff117-kube-api-access-gppff\") pod \"1986a636-70f9-4147-9e78-7ed329bff117\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.714030 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-combined-ca-bundle\") pod \"1986a636-70f9-4147-9e78-7ed329bff117\" (UID: \"1986a636-70f9-4147-9e78-7ed329bff117\") " Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.719072 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1986a636-70f9-4147-9e78-7ed329bff117" (UID: "1986a636-70f9-4147-9e78-7ed329bff117"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.721686 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1986a636-70f9-4147-9e78-7ed329bff117-kube-api-access-gppff" (OuterVolumeSpecName: "kube-api-access-gppff") pod "1986a636-70f9-4147-9e78-7ed329bff117" (UID: "1986a636-70f9-4147-9e78-7ed329bff117"). InnerVolumeSpecName "kube-api-access-gppff". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.736449 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1986a636-70f9-4147-9e78-7ed329bff117" (UID: "1986a636-70f9-4147-9e78-7ed329bff117"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.764987 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-config-data" (OuterVolumeSpecName: "config-data") pod "1986a636-70f9-4147-9e78-7ed329bff117" (UID: "1986a636-70f9-4147-9e78-7ed329bff117"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.815938 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gppff\" (UniqueName: \"kubernetes.io/projected/1986a636-70f9-4147-9e78-7ed329bff117-kube-api-access-gppff\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.815968 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.815978 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:38 crc kubenswrapper[4813]: I0320 16:21:38.815986 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1986a636-70f9-4147-9e78-7ed329bff117-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.247007 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" event={"ID":"1986a636-70f9-4147-9e78-7ed329bff117","Type":"ContainerDied","Data":"98a91f82f33defd3f3e85cd38d9a4bed1493a044cd81715d302c97ea6c60f0c0"} Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.247071 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98a91f82f33defd3f3e85cd38d9a4bed1493a044cd81715d302c97ea6c60f0c0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.247121 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.519584 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:39 crc kubenswrapper[4813]: E0320 16:21:39.519993 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1986a636-70f9-4147-9e78-7ed329bff117" containerName="watcher-kuttl-db-sync" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.520037 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1986a636-70f9-4147-9e78-7ed329bff117" containerName="watcher-kuttl-db-sync" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.520253 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1986a636-70f9-4147-9e78-7ed329bff117" containerName="watcher-kuttl-db-sync" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.521318 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.524858 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-tl4fb" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.525281 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.531048 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.553689 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.556916 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.565070 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.586227 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.628906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.629688 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412e9ea2-a753-4686-bc74-5a0816791634-logs\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.629720 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.629754 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.630123 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7ss6\" (UniqueName: \"kubernetes.io/projected/412e9ea2-a753-4686-bc74-5a0816791634-kube-api-access-v7ss6\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.630191 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.634122 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.635120 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.637222 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.651553 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731420 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731474 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731547 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731583 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml74z\" (UniqueName: \"kubernetes.io/projected/9de8fb0c-b912-4305-b271-5e56a7efbb00-kube-api-access-ml74z\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731617 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7ss6\" (UniqueName: \"kubernetes.io/projected/412e9ea2-a753-4686-bc74-5a0816791634-kube-api-access-v7ss6\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731637 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731665 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731719 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9de8fb0c-b912-4305-b271-5e56a7efbb00-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412e9ea2-a753-4686-bc74-5a0816791634-logs\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.731788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.732851 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412e9ea2-a753-4686-bc74-5a0816791634-logs\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.737140 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.739476 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.740131 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.740974 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: 
\"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.755272 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7ss6\" (UniqueName: \"kubernetes.io/projected/412e9ea2-a753-4686-bc74-5a0816791634-kube-api-access-v7ss6\") pod \"watcher-kuttl-api-0\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.832713 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.832764 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fafa634-d7cd-4e51-868d-3d32fe0df856-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.832795 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9de8fb0c-b912-4305-b271-5e56a7efbb00-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.832821 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.833111 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.833216 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.833279 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9de8fb0c-b912-4305-b271-5e56a7efbb00-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.833301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml74z\" (UniqueName: \"kubernetes.io/projected/9de8fb0c-b912-4305-b271-5e56a7efbb00-kube-api-access-ml74z\") pod \"watcher-kuttl-applier-0\" (UID: 
\"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.834190 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9psm8\" (UniqueName: \"kubernetes.io/projected/1fafa634-d7cd-4e51-868d-3d32fe0df856-kube-api-access-9psm8\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.834353 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.834451 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.834524 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.836681 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.836867 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.837324 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.847177 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.851708 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml74z\" (UniqueName: \"kubernetes.io/projected/9de8fb0c-b912-4305-b271-5e56a7efbb00-kube-api-access-ml74z\") pod \"watcher-kuttl-applier-0\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.892823 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.938616 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fafa634-d7cd-4e51-868d-3d32fe0df856-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.938805 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.939019 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9psm8\" (UniqueName: \"kubernetes.io/projected/1fafa634-d7cd-4e51-868d-3d32fe0df856-kube-api-access-9psm8\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.939084 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.939142 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.939167 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.940276 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fafa634-d7cd-4e51-868d-3d32fe0df856-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.944236 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.949133 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.949904 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.950039 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.959047 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9psm8\" (UniqueName: \"kubernetes.io/projected/1fafa634-d7cd-4e51-868d-3d32fe0df856-kube-api-access-9psm8\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:39 crc kubenswrapper[4813]: I0320 16:21:39.961415 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:40 crc kubenswrapper[4813]: I0320 16:21:40.312452 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:40 crc kubenswrapper[4813]: I0320 16:21:40.401319 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:40 crc kubenswrapper[4813]: I0320 16:21:40.544128 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.298808 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1fafa634-d7cd-4e51-868d-3d32fe0df856","Type":"ContainerStarted","Data":"08170888008c4e9dbd548d86b00d0fefacbf4e4d564d0f4867bd12620fcaa737"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.299266 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1fafa634-d7cd-4e51-868d-3d32fe0df856","Type":"ContainerStarted","Data":"15d09ad893569ef9515369bc04fc282719dd6ce1092928fda339910baf1884fb"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.299285 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"412e9ea2-a753-4686-bc74-5a0816791634","Type":"ContainerStarted","Data":"92ddaef986f7dd8398e88f14bc75b44e7d0475d76bf474c25b00b09be9260afc"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.299299 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"412e9ea2-a753-4686-bc74-5a0816791634","Type":"ContainerStarted","Data":"7bcce74a7897345272e6e623e5030205fda02f3cacf34a73f02eae120bb2a25e"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.299311 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"412e9ea2-a753-4686-bc74-5a0816791634","Type":"ContainerStarted","Data":"e5fa9c9cc90e7ea74b5509697fded9e20618638c5207c64da76f444e2ea13deb"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.299323 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"9de8fb0c-b912-4305-b271-5e56a7efbb00","Type":"ContainerStarted","Data":"b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.299336 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"9de8fb0c-b912-4305-b271-5e56a7efbb00","Type":"ContainerStarted","Data":"2ae06161d1b8be68193211a088abb158d7c0eb4c8e55cef6957377e20961b841"} Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.385393 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.385373755 podStartE2EDuration="2.385373755s" podCreationTimestamp="2026-03-20 16:21:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:21:41.379611399 +0000 UTC m=+2630.802314240" watchObservedRunningTime="2026-03-20 16:21:41.385373755 +0000 UTC m=+2630.808076596" Mar 20 16:21:41 crc kubenswrapper[4813]: I0320 16:21:41.418251 4813 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.418228231 podStartE2EDuration="2.418228231s" podCreationTimestamp="2026-03-20 16:21:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:21:41.409713782 +0000 UTC m=+2630.832416633" watchObservedRunningTime="2026-03-20 16:21:41.418228231 +0000 UTC m=+2630.840931072" Mar 20 16:21:42 crc kubenswrapper[4813]: I0320 16:21:42.292263 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:44 crc kubenswrapper[4813]: I0320 16:21:44.478687 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:44 crc kubenswrapper[4813]: I0320 16:21:44.508136 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=5.508098483 podStartE2EDuration="5.508098483s" podCreationTimestamp="2026-03-20 16:21:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:21:41.432872536 +0000 UTC m=+2630.855575377" watchObservedRunningTime="2026-03-20 16:21:44.508098483 +0000 UTC m=+2633.930801324" Mar 20 16:21:44 crc kubenswrapper[4813]: I0320 16:21:44.848682 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:44 crc kubenswrapper[4813]: I0320 16:21:44.893934 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:48 crc kubenswrapper[4813]: I0320 16:21:48.266475 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:21:48 crc kubenswrapper[4813]: E0320 16:21:48.267101 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:21:49 crc kubenswrapper[4813]: I0320 16:21:49.847936 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:49 crc kubenswrapper[4813]: I0320 16:21:49.855603 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:49 crc kubenswrapper[4813]: I0320 16:21:49.894311 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:49 crc kubenswrapper[4813]: I0320 16:21:49.931836 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:49 crc kubenswrapper[4813]: I0320 16:21:49.962765 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:49 crc kubenswrapper[4813]: I0320 16:21:49.986218 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:50 crc kubenswrapper[4813]: I0320 16:21:50.365343 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:50 crc kubenswrapper[4813]: I0320 16:21:50.371319 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:50 crc kubenswrapper[4813]: I0320 16:21:50.389576 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:21:50 crc kubenswrapper[4813]: I0320 16:21:50.400021 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:21:52 crc kubenswrapper[4813]: I0320 16:21:52.643611 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:52 crc kubenswrapper[4813]: I0320 16:21:52.645653 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-central-agent" containerID="cri-o://1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6" gracePeriod=30 Mar 20 16:21:52 crc kubenswrapper[4813]: I0320 16:21:52.646546 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="proxy-httpd" containerID="cri-o://28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f" gracePeriod=30 Mar 20 16:21:52 crc kubenswrapper[4813]: I0320 16:21:52.646717 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="sg-core" containerID="cri-o://6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997" gracePeriod=30 Mar 20 16:21:52 crc kubenswrapper[4813]: I0320 16:21:52.646849 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-notification-agent" containerID="cri-o://8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074" gracePeriod=30 Mar 20 16:21:52 crc kubenswrapper[4813]: I0320 16:21:52.711849 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.204:3000/\": EOF" Mar 20 16:21:53 crc kubenswrapper[4813]: I0320 16:21:53.390461 4813 generic.go:334] "Generic (PLEG): container finished" podID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerID="28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f" exitCode=0 Mar 20 16:21:53 crc kubenswrapper[4813]: I0320 16:21:53.390738 4813 generic.go:334] "Generic (PLEG): container finished" podID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerID="6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997" exitCode=2 Mar 20 16:21:53 crc kubenswrapper[4813]: I0320 16:21:53.390513 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerDied","Data":"28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f"} Mar 20 16:21:53 crc kubenswrapper[4813]: I0320 16:21:53.390794 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerDied","Data":"6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997"} Mar 20 16:21:53 crc kubenswrapper[4813]: I0320 16:21:53.390825 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerDied","Data":"1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6"} Mar 20 16:21:53 crc kubenswrapper[4813]: I0320 16:21:53.390746 4813 generic.go:334] "Generic (PLEG): container finished" podID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerID="1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6" exitCode=0 Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.151244 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326339 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-log-httpd\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326525 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-ceilometer-tls-certs\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326573 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-config-data\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326594 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-scripts\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326643 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfqnt\" (UniqueName: \"kubernetes.io/projected/c9f704e1-7801-45e1-a3ff-d9318219687a-kube-api-access-gfqnt\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326670 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-run-httpd\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326702 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-combined-ca-bundle\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326763 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-sg-core-conf-yaml\") pod \"c9f704e1-7801-45e1-a3ff-d9318219687a\" (UID: \"c9f704e1-7801-45e1-a3ff-d9318219687a\") " Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.326960 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.327281 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.327356 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.334670 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9f704e1-7801-45e1-a3ff-d9318219687a-kube-api-access-gfqnt" (OuterVolumeSpecName: "kube-api-access-gfqnt") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "kube-api-access-gfqnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.343885 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-scripts" (OuterVolumeSpecName: "scripts") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.351441 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.386765 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.390627 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qdk8l"] Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.391892 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="proxy-httpd" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.391919 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="proxy-httpd" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.391935 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-central-agent" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.391944 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-central-agent" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.391957 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="sg-core" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.391966 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="sg-core" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.391985 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-notification-agent" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.391994 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-notification-agent" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.392172 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="proxy-httpd" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.392201 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-notification-agent" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.392214 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="sg-core" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.392226 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerName="ceilometer-central-agent" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.394102 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.401750 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qdk8l"] Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.409409 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" (UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.418884 4813 generic.go:334] "Generic (PLEG): container finished" podID="c9f704e1-7801-45e1-a3ff-d9318219687a" containerID="8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074" exitCode=0 Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.418932 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerDied","Data":"8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074"} Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.418964 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"c9f704e1-7801-45e1-a3ff-d9318219687a","Type":"ContainerDied","Data":"bcb1f5bb9c692ebb2b1ddb924e3d800b8356142aca0bbbbb5d58cd4d1edeb04e"} Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.418986 4813 scope.go:117] "RemoveContainer" containerID="28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.419134 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.432059 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.432086 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfqnt\" (UniqueName: \"kubernetes.io/projected/c9f704e1-7801-45e1-a3ff-d9318219687a-kube-api-access-gfqnt\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.432095 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9f704e1-7801-45e1-a3ff-d9318219687a-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.432103 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.432112 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.432120 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.448727 4813 scope.go:117] "RemoveContainer" containerID="6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.465695 4813 scope.go:117] "RemoveContainer" containerID="8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.480405 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-config-data" (OuterVolumeSpecName: "config-data") pod "c9f704e1-7801-45e1-a3ff-d9318219687a" 
(UID: "c9f704e1-7801-45e1-a3ff-d9318219687a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.487055 4813 scope.go:117] "RemoveContainer" containerID="1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.504176 4813 scope.go:117] "RemoveContainer" containerID="28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.504679 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f\": container with ID starting with 28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f not found: ID does not exist" containerID="28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.504711 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f"} err="failed to get container status \"28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f\": rpc error: code = NotFound desc = could not find container \"28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f\": container with ID starting with 28caff1789e92afe4e4ba2373d3aaa2a92cb65700e8ad1595b678d6ea513524f not found: ID does not exist" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.504733 4813 scope.go:117] "RemoveContainer" containerID="6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.504954 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997\": container with ID starting with 6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997 not found: ID does not exist" containerID="6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.504972 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997"} err="failed to get container status \"6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997\": rpc error: code = NotFound desc = could not find container \"6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997\": container with ID starting with 6da5811ca8147e01a706b3321ddae183d5ace35776bc8f6eaaa06e77981bc997 not found: ID does not exist" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.504986 4813 scope.go:117] "RemoveContainer" containerID="8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.505146 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074\": container with ID starting with 8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074 not found: ID does not exist" containerID="8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.505164 4813 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074"} err="failed to get container status \"8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074\": rpc error: code = NotFound desc = could not find container \"8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074\": container with ID starting with 8c1d0a692d8656b1dbe4aab8773b0f691720336f6fbe5447012d00b55c8b9074 not found: ID does not exist" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.505177 4813 scope.go:117] "RemoveContainer" containerID="1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6" Mar 20 16:21:54 crc kubenswrapper[4813]: E0320 16:21:54.505423 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6\": container with ID starting with 1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6 not found: ID does not exist" containerID="1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.505443 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6"} err="failed to get container status \"1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6\": rpc error: code = NotFound desc = could not find container \"1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6\": container with ID starting with 1a57dff3ec2b7e0dac130663bcb0e6815f1d2bd747bca0d0b8f2f27f3884ffb6 not found: ID does not exist" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.533179 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-catalog-content\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.533282 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtzzk\" (UniqueName: \"kubernetes.io/projected/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-kube-api-access-vtzzk\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.533359 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-utilities\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.533437 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f704e1-7801-45e1-a3ff-d9318219687a-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.634959 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-catalog-content\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " 
pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.635072 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtzzk\" (UniqueName: \"kubernetes.io/projected/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-kube-api-access-vtzzk\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.635116 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-utilities\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.635475 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-catalog-content\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.635540 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-utilities\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.659392 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtzzk\" (UniqueName: \"kubernetes.io/projected/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-kube-api-access-vtzzk\") pod \"redhat-operators-qdk8l\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.741379 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.749563 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.761411 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.784229 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.787188 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.792609 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.792838 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.792977 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.800455 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944011 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-run-httpd\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944074 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944091 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-log-httpd\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944112 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-scripts\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944126 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-config-data\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944148 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944165 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:54 crc kubenswrapper[4813]: I0320 16:21:54.944326 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpr77\" (UniqueName: \"kubernetes.io/projected/4fa1cf5c-db32-4305-95e5-d0e69b581d36-kube-api-access-mpr77\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046082 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-run-httpd\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046148 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046170 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-log-httpd\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046194 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-scripts\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046209 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-config-data\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046231 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046247 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046287 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpr77\" (UniqueName: \"kubernetes.io/projected/4fa1cf5c-db32-4305-95e5-d0e69b581d36-kube-api-access-mpr77\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.046644 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-run-httpd\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " 
pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.047256 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-log-httpd\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.052733 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-scripts\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.052739 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.053706 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.053801 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-config-data\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.054112 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.064775 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpr77\" (UniqueName: \"kubernetes.io/projected/4fa1cf5c-db32-4305-95e5-d0e69b581d36-kube-api-access-mpr77\") pod \"ceilometer-0\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.125109 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.216962 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qdk8l"] Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.300118 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9f704e1-7801-45e1-a3ff-d9318219687a" path="/var/lib/kubelet/pods/c9f704e1-7801-45e1-a3ff-d9318219687a/volumes" Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.432321 4813 generic.go:334] "Generic (PLEG): container finished" podID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerID="b6e002c6990351ab308564ce6d73240bd0f7ee34abaa814085d2f8fcf5183843" exitCode=0 Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.432356 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerDied","Data":"b6e002c6990351ab308564ce6d73240bd0f7ee34abaa814085d2f8fcf5183843"} Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.432376 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerStarted","Data":"84e870bdc957be090b33c3c817aa33244f1cf33950b05da3fa1d565f72b5bdf9"} Mar 20 16:21:55 crc kubenswrapper[4813]: I0320 16:21:55.642117 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.266045 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.275857 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-lf5qb"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.308184 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher9cc2-account-delete-889ck"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.309446 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.330425 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher9cc2-account-delete-889ck"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.343179 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.343401 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="1fafa634-d7cd-4e51-868d-3d32fe0df856" containerName="watcher-decision-engine" containerID="cri-o://08170888008c4e9dbd548d86b00d0fefacbf4e4d564d0f4867bd12620fcaa737" gracePeriod=30 Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.414469 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.414769 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="9de8fb0c-b912-4305-b271-5e56a7efbb00" containerName="watcher-applier" containerID="cri-o://b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367" gracePeriod=30 Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.463586 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerStarted","Data":"c340b2a18b34704ee7aa474ef1a23c1e266350a32b32a7f1a26e6529eb9228d6"} Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.478142 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sb22\" (UniqueName: \"kubernetes.io/projected/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-kube-api-access-6sb22\") pod \"watcher9cc2-account-delete-889ck\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.478337 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-operator-scripts\") pod \"watcher9cc2-account-delete-889ck\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.529385 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.529621 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-kuttl-api-log" containerID="cri-o://7bcce74a7897345272e6e623e5030205fda02f3cacf34a73f02eae120bb2a25e" gracePeriod=30 Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.529759 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-api" containerID="cri-o://92ddaef986f7dd8398e88f14bc75b44e7d0475d76bf474c25b00b09be9260afc" gracePeriod=30 Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.579297 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-operator-scripts\") pod \"watcher9cc2-account-delete-889ck\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.579410 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sb22\" (UniqueName: \"kubernetes.io/projected/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-kube-api-access-6sb22\") pod \"watcher9cc2-account-delete-889ck\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.580439 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-operator-scripts\") pod \"watcher9cc2-account-delete-889ck\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.613182 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sb22\" (UniqueName: \"kubernetes.io/projected/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-kube-api-access-6sb22\") pod \"watcher9cc2-account-delete-889ck\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:56 crc kubenswrapper[4813]: E0320 16:21:56.736967 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod412e9ea2_a753_4686_bc74_5a0816791634.slice/crio-conmon-7bcce74a7897345272e6e623e5030205fda02f3cacf34a73f02eae120bb2a25e.scope\": RecentStats: unable to find data in memory cache]" Mar 20 16:21:56 crc kubenswrapper[4813]: I0320 16:21:56.862353 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.286194 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1986a636-70f9-4147-9e78-7ed329bff117" path="/var/lib/kubelet/pods/1986a636-70f9-4147-9e78-7ed329bff117/volumes" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.397908 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher9cc2-account-delete-889ck"] Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.477273 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerStarted","Data":"e9df6c8377f6a7a284b284e1fe189941c65bde0f79037ddd4f7bf7f0eb0e8d3f"} Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.497404 4813 generic.go:334] "Generic (PLEG): container finished" podID="412e9ea2-a753-4686-bc74-5a0816791634" containerID="92ddaef986f7dd8398e88f14bc75b44e7d0475d76bf474c25b00b09be9260afc" exitCode=0 Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.497431 4813 generic.go:334] "Generic (PLEG): container finished" podID="412e9ea2-a753-4686-bc74-5a0816791634" containerID="7bcce74a7897345272e6e623e5030205fda02f3cacf34a73f02eae120bb2a25e" exitCode=143 Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.497523 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"412e9ea2-a753-4686-bc74-5a0816791634","Type":"ContainerDied","Data":"92ddaef986f7dd8398e88f14bc75b44e7d0475d76bf474c25b00b09be9260afc"} Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.497576 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"412e9ea2-a753-4686-bc74-5a0816791634","Type":"ContainerDied","Data":"7bcce74a7897345272e6e623e5030205fda02f3cacf34a73f02eae120bb2a25e"} Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.506050 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" event={"ID":"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4","Type":"ContainerStarted","Data":"8d59dd17a277b89669654c76efaca76c1853f16396d577a4f16060e258c0dd54"} Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.532874 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerStarted","Data":"df5d079fd8420cf63379115fc7ecc4fc8197f970aedee4345d297a1c57c4e76b"} Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.532916 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerStarted","Data":"ab0e22f4e5943df8e595e547a7021e991a5431b7462f307b7a311effbdd1e948"} Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.533598 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.704028 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-custom-prometheus-ca\") pod \"412e9ea2-a753-4686-bc74-5a0816791634\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.704096 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7ss6\" (UniqueName: \"kubernetes.io/projected/412e9ea2-a753-4686-bc74-5a0816791634-kube-api-access-v7ss6\") pod \"412e9ea2-a753-4686-bc74-5a0816791634\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.704199 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412e9ea2-a753-4686-bc74-5a0816791634-logs\") pod \"412e9ea2-a753-4686-bc74-5a0816791634\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.704234 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-config-data\") pod \"412e9ea2-a753-4686-bc74-5a0816791634\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.704276 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-cert-memcached-mtls\") pod \"412e9ea2-a753-4686-bc74-5a0816791634\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.704295 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-combined-ca-bundle\") pod \"412e9ea2-a753-4686-bc74-5a0816791634\" (UID: \"412e9ea2-a753-4686-bc74-5a0816791634\") " Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.705281 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/412e9ea2-a753-4686-bc74-5a0816791634-logs" (OuterVolumeSpecName: "logs") pod "412e9ea2-a753-4686-bc74-5a0816791634" (UID: "412e9ea2-a753-4686-bc74-5a0816791634"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.714748 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/412e9ea2-a753-4686-bc74-5a0816791634-kube-api-access-v7ss6" (OuterVolumeSpecName: "kube-api-access-v7ss6") pod "412e9ea2-a753-4686-bc74-5a0816791634" (UID: "412e9ea2-a753-4686-bc74-5a0816791634"). InnerVolumeSpecName "kube-api-access-v7ss6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.749928 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "412e9ea2-a753-4686-bc74-5a0816791634" (UID: "412e9ea2-a753-4686-bc74-5a0816791634"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.773196 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "412e9ea2-a753-4686-bc74-5a0816791634" (UID: "412e9ea2-a753-4686-bc74-5a0816791634"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.778599 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-config-data" (OuterVolumeSpecName: "config-data") pod "412e9ea2-a753-4686-bc74-5a0816791634" (UID: "412e9ea2-a753-4686-bc74-5a0816791634"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.805198 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "412e9ea2-a753-4686-bc74-5a0816791634" (UID: "412e9ea2-a753-4686-bc74-5a0816791634"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.806006 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412e9ea2-a753-4686-bc74-5a0816791634-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.806034 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.806045 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.806056 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.806064 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/412e9ea2-a753-4686-bc74-5a0816791634-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:57 crc kubenswrapper[4813]: I0320 16:21:57.806073 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7ss6\" (UniqueName: \"kubernetes.io/projected/412e9ea2-a753-4686-bc74-5a0816791634-kube-api-access-v7ss6\") on node \"crc\" DevicePath \"\"" Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.549029 4813 generic.go:334] "Generic (PLEG): container finished" podID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerID="e9df6c8377f6a7a284b284e1fe189941c65bde0f79037ddd4f7bf7f0eb0e8d3f" exitCode=0 Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.549562 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" 
event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerDied","Data":"e9df6c8377f6a7a284b284e1fe189941c65bde0f79037ddd4f7bf7f0eb0e8d3f"} Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.554426 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"412e9ea2-a753-4686-bc74-5a0816791634","Type":"ContainerDied","Data":"e5fa9c9cc90e7ea74b5509697fded9e20618638c5207c64da76f444e2ea13deb"} Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.554512 4813 scope.go:117] "RemoveContainer" containerID="92ddaef986f7dd8398e88f14bc75b44e7d0475d76bf474c25b00b09be9260afc" Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.554512 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.556896 4813 generic.go:334] "Generic (PLEG): container finished" podID="dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" containerID="f3ed76ae2142273e540efabdcb5753d4a8a48626a75679e34c074d4a31d58726" exitCode=0 Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.556979 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" event={"ID":"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4","Type":"ContainerDied","Data":"f3ed76ae2142273e540efabdcb5753d4a8a48626a75679e34c074d4a31d58726"} Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.568207 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerStarted","Data":"4fd6191a7c9d7ca44261d5fcbd3624b0b03aacc2bc58d7442c99a8e1fd08cf35"} Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.598728 4813 scope.go:117] "RemoveContainer" containerID="7bcce74a7897345272e6e623e5030205fda02f3cacf34a73f02eae120bb2a25e" Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.601848 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.610784 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:21:58 crc kubenswrapper[4813]: I0320 16:21:58.811466 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:21:59 crc kubenswrapper[4813]: I0320 16:21:59.276651 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="412e9ea2-a753-4686-bc74-5a0816791634" path="/var/lib/kubelet/pods/412e9ea2-a753-4686-bc74-5a0816791634/volumes" Mar 20 16:21:59 crc kubenswrapper[4813]: I0320 16:21:59.578134 4813 generic.go:334] "Generic (PLEG): container finished" podID="9de8fb0c-b912-4305-b271-5e56a7efbb00" containerID="b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367" exitCode=0 Mar 20 16:21:59 crc kubenswrapper[4813]: I0320 16:21:59.578205 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"9de8fb0c-b912-4305-b271-5e56a7efbb00","Type":"ContainerDied","Data":"b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367"} Mar 20 16:21:59 crc kubenswrapper[4813]: I0320 16:21:59.580443 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerStarted","Data":"c149c2cb60306b179b3cb3b66061c813f7c184f29707881f909ad2a3d4d52e91"} Mar 
20 16:21:59 crc kubenswrapper[4813]: I0320 16:21:59.609552 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qdk8l" podStartSLOduration=1.787531126 podStartE2EDuration="5.609533288s" podCreationTimestamp="2026-03-20 16:21:54 +0000 UTC" firstStartedPulling="2026-03-20 16:21:55.435717626 +0000 UTC m=+2644.858420477" lastFinishedPulling="2026-03-20 16:21:59.257719798 +0000 UTC m=+2648.680422639" observedRunningTime="2026-03-20 16:21:59.602393165 +0000 UTC m=+2649.025096006" watchObservedRunningTime="2026-03-20 16:21:59.609533288 +0000 UTC m=+2649.032236129" Mar 20 16:21:59 crc kubenswrapper[4813]: E0320 16:21:59.897114 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367 is running failed: container process not found" containerID="b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:21:59 crc kubenswrapper[4813]: E0320 16:21:59.898681 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367 is running failed: container process not found" containerID="b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:21:59 crc kubenswrapper[4813]: E0320 16:21:59.899735 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367 is running failed: container process not found" containerID="b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:21:59 crc kubenswrapper[4813]: E0320 16:21:59.899772 4813 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367 is running failed: container process not found" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="9de8fb0c-b912-4305-b271-5e56a7efbb00" containerName="watcher-applier" Mar 20 16:21:59 crc kubenswrapper[4813]: I0320 16:21:59.913539 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.043099 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sb22\" (UniqueName: \"kubernetes.io/projected/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-kube-api-access-6sb22\") pod \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.043258 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-operator-scripts\") pod \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\" (UID: \"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.043818 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" (UID: "dae07967-0a0c-4ccb-ac26-32b0a83cc3b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.048401 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-kube-api-access-6sb22" (OuterVolumeSpecName: "kube-api-access-6sb22") pod "dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" (UID: "dae07967-0a0c-4ccb-ac26-32b0a83cc3b4"). InnerVolumeSpecName "kube-api-access-6sb22". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.074636 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135375 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567062-v4l5d"] Mar 20 16:22:00 crc kubenswrapper[4813]: E0320 16:22:00.135701 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-kuttl-api-log" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135718 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-kuttl-api-log" Mar 20 16:22:00 crc kubenswrapper[4813]: E0320 16:22:00.135736 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-api" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135743 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-api" Mar 20 16:22:00 crc kubenswrapper[4813]: E0320 16:22:00.135756 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9de8fb0c-b912-4305-b271-5e56a7efbb00" containerName="watcher-applier" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135761 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9de8fb0c-b912-4305-b271-5e56a7efbb00" containerName="watcher-applier" Mar 20 16:22:00 crc kubenswrapper[4813]: E0320 16:22:00.135772 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" containerName="mariadb-account-delete" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135778 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" containerName="mariadb-account-delete" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135909 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" containerName="mariadb-account-delete" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135919 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-api" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135931 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="412e9ea2-a753-4686-bc74-5a0816791634" containerName="watcher-kuttl-api-log" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.135948 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9de8fb0c-b912-4305-b271-5e56a7efbb00" containerName="watcher-applier" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.136446 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.139131 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.139385 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.139460 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.145252 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sb22\" (UniqueName: \"kubernetes.io/projected/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-kube-api-access-6sb22\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.145290 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.145574 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567062-v4l5d"] Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.247277 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9de8fb0c-b912-4305-b271-5e56a7efbb00-logs\") pod \"9de8fb0c-b912-4305-b271-5e56a7efbb00\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.247360 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml74z\" (UniqueName: \"kubernetes.io/projected/9de8fb0c-b912-4305-b271-5e56a7efbb00-kube-api-access-ml74z\") pod \"9de8fb0c-b912-4305-b271-5e56a7efbb00\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.247518 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-config-data\") pod \"9de8fb0c-b912-4305-b271-5e56a7efbb00\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.247674 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-cert-memcached-mtls\") pod \"9de8fb0c-b912-4305-b271-5e56a7efbb00\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.247701 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-combined-ca-bundle\") pod \"9de8fb0c-b912-4305-b271-5e56a7efbb00\" (UID: \"9de8fb0c-b912-4305-b271-5e56a7efbb00\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.248023 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd6sk\" (UniqueName: \"kubernetes.io/projected/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec-kube-api-access-fd6sk\") pod \"auto-csr-approver-29567062-v4l5d\" (UID: \"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec\") " pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 
16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.248626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9de8fb0c-b912-4305-b271-5e56a7efbb00-logs" (OuterVolumeSpecName: "logs") pod "9de8fb0c-b912-4305-b271-5e56a7efbb00" (UID: "9de8fb0c-b912-4305-b271-5e56a7efbb00"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.252849 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9de8fb0c-b912-4305-b271-5e56a7efbb00-kube-api-access-ml74z" (OuterVolumeSpecName: "kube-api-access-ml74z") pod "9de8fb0c-b912-4305-b271-5e56a7efbb00" (UID: "9de8fb0c-b912-4305-b271-5e56a7efbb00"). InnerVolumeSpecName "kube-api-access-ml74z". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.265946 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:22:00 crc kubenswrapper[4813]: E0320 16:22:00.266240 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.282909 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9de8fb0c-b912-4305-b271-5e56a7efbb00" (UID: "9de8fb0c-b912-4305-b271-5e56a7efbb00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.317383 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-config-data" (OuterVolumeSpecName: "config-data") pod "9de8fb0c-b912-4305-b271-5e56a7efbb00" (UID: "9de8fb0c-b912-4305-b271-5e56a7efbb00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.336336 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "9de8fb0c-b912-4305-b271-5e56a7efbb00" (UID: "9de8fb0c-b912-4305-b271-5e56a7efbb00"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.349231 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd6sk\" (UniqueName: \"kubernetes.io/projected/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec-kube-api-access-fd6sk\") pod \"auto-csr-approver-29567062-v4l5d\" (UID: \"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec\") " pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.349467 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.349507 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.349519 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9de8fb0c-b912-4305-b271-5e56a7efbb00-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.349532 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml74z\" (UniqueName: \"kubernetes.io/projected/9de8fb0c-b912-4305-b271-5e56a7efbb00-kube-api-access-ml74z\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.349545 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9de8fb0c-b912-4305-b271-5e56a7efbb00-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.367071 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd6sk\" (UniqueName: \"kubernetes.io/projected/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec-kube-api-access-fd6sk\") pod \"auto-csr-approver-29567062-v4l5d\" (UID: \"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec\") " pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.464690 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.591381 4813 generic.go:334] "Generic (PLEG): container finished" podID="1fafa634-d7cd-4e51-868d-3d32fe0df856" containerID="08170888008c4e9dbd548d86b00d0fefacbf4e4d564d0f4867bd12620fcaa737" exitCode=0 Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.591463 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1fafa634-d7cd-4e51-868d-3d32fe0df856","Type":"ContainerDied","Data":"08170888008c4e9dbd548d86b00d0fefacbf4e4d564d0f4867bd12620fcaa737"} Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.594287 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"9de8fb0c-b912-4305-b271-5e56a7efbb00","Type":"ContainerDied","Data":"2ae06161d1b8be68193211a088abb158d7c0eb4c8e55cef6957377e20961b841"} Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.594345 4813 scope.go:117] "RemoveContainer" containerID="b4d4f59faffbfcad20dbd6c5875fddc0b3fe7e74d4d5c29529c056cd56567367" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.594538 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.607453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" event={"ID":"dae07967-0a0c-4ccb-ac26-32b0a83cc3b4","Type":"ContainerDied","Data":"8d59dd17a277b89669654c76efaca76c1853f16396d577a4f16060e258c0dd54"} Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.607706 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d59dd17a277b89669654c76efaca76c1853f16396d577a4f16060e258c0dd54" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.607778 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher9cc2-account-delete-889ck" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.632022 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerStarted","Data":"a655fa348f940d53909e9c0d4e9a3dc5e3e7a623f3810c7160b06d420bf76e4b"} Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.632112 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.632099 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-central-agent" containerID="cri-o://ab0e22f4e5943df8e595e547a7021e991a5431b7462f307b7a311effbdd1e948" gracePeriod=30 Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.632247 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="proxy-httpd" containerID="cri-o://a655fa348f940d53909e9c0d4e9a3dc5e3e7a623f3810c7160b06d420bf76e4b" gracePeriod=30 Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.632290 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="sg-core" containerID="cri-o://4fd6191a7c9d7ca44261d5fcbd3624b0b03aacc2bc58d7442c99a8e1fd08cf35" gracePeriod=30 Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.632321 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-notification-agent" containerID="cri-o://df5d079fd8420cf63379115fc7ecc4fc8197f970aedee4345d297a1c57c4e76b" gracePeriod=30 Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.641851 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.667798 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.689255 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.39193531 podStartE2EDuration="6.689234423s" podCreationTimestamp="2026-03-20 16:21:54 +0000 UTC" firstStartedPulling="2026-03-20 16:21:55.642761091 +0000 UTC m=+2645.065463942" lastFinishedPulling="2026-03-20 16:21:59.940060214 +0000 UTC m=+2649.362763055" observedRunningTime="2026-03-20 16:22:00.659277215 +0000 UTC m=+2650.081980066" watchObservedRunningTime="2026-03-20 16:22:00.689234423 +0000 UTC m=+2650.111937274" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.778092 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.857710 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-combined-ca-bundle\") pod \"1fafa634-d7cd-4e51-868d-3d32fe0df856\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.857770 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-custom-prometheus-ca\") pod \"1fafa634-d7cd-4e51-868d-3d32fe0df856\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.857800 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-cert-memcached-mtls\") pod \"1fafa634-d7cd-4e51-868d-3d32fe0df856\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.857840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-config-data\") pod \"1fafa634-d7cd-4e51-868d-3d32fe0df856\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.857868 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9psm8\" (UniqueName: \"kubernetes.io/projected/1fafa634-d7cd-4e51-868d-3d32fe0df856-kube-api-access-9psm8\") pod \"1fafa634-d7cd-4e51-868d-3d32fe0df856\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.857908 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fafa634-d7cd-4e51-868d-3d32fe0df856-logs\") pod \"1fafa634-d7cd-4e51-868d-3d32fe0df856\" (UID: \"1fafa634-d7cd-4e51-868d-3d32fe0df856\") " Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.858468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fafa634-d7cd-4e51-868d-3d32fe0df856-logs" (OuterVolumeSpecName: "logs") pod "1fafa634-d7cd-4e51-868d-3d32fe0df856" (UID: "1fafa634-d7cd-4e51-868d-3d32fe0df856"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.864718 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fafa634-d7cd-4e51-868d-3d32fe0df856-kube-api-access-9psm8" (OuterVolumeSpecName: "kube-api-access-9psm8") pod "1fafa634-d7cd-4e51-868d-3d32fe0df856" (UID: "1fafa634-d7cd-4e51-868d-3d32fe0df856"). InnerVolumeSpecName "kube-api-access-9psm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.885569 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "1fafa634-d7cd-4e51-868d-3d32fe0df856" (UID: "1fafa634-d7cd-4e51-868d-3d32fe0df856"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.897894 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fafa634-d7cd-4e51-868d-3d32fe0df856" (UID: "1fafa634-d7cd-4e51-868d-3d32fe0df856"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.912943 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-config-data" (OuterVolumeSpecName: "config-data") pod "1fafa634-d7cd-4e51-868d-3d32fe0df856" (UID: "1fafa634-d7cd-4e51-868d-3d32fe0df856"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.920966 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "1fafa634-d7cd-4e51-868d-3d32fe0df856" (UID: "1fafa634-d7cd-4e51-868d-3d32fe0df856"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.959361 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.959412 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.959428 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.959438 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fafa634-d7cd-4e51-868d-3d32fe0df856-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.959450 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9psm8\" (UniqueName: \"kubernetes.io/projected/1fafa634-d7cd-4e51-868d-3d32fe0df856-kube-api-access-9psm8\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.959463 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fafa634-d7cd-4e51-868d-3d32fe0df856-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:00 crc kubenswrapper[4813]: I0320 16:22:00.980072 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567062-v4l5d"] Mar 20 16:22:01 crc kubenswrapper[4813]: W0320 16:22:01.006909 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfe4a6ec_6522_49dc_a0fc_01e0e71d9aec.slice/crio-ae58b9d3429df0d0d4cd0d49583b9f80ea0a1b39a0db5b2cf97597df96bd7a12 WatchSource:0}: Error finding container 
ae58b9d3429df0d0d4cd0d49583b9f80ea0a1b39a0db5b2cf97597df96bd7a12: Status 404 returned error can't find the container with id ae58b9d3429df0d0d4cd0d49583b9f80ea0a1b39a0db5b2cf97597df96bd7a12 Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.278100 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9de8fb0c-b912-4305-b271-5e56a7efbb00" path="/var/lib/kubelet/pods/9de8fb0c-b912-4305-b271-5e56a7efbb00/volumes" Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.332800 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-m9s9w"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.340157 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-m9s9w"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.350690 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher9cc2-account-delete-889ck"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.358080 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher9cc2-account-delete-889ck"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.364623 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-9cc2-account-create-update-929sf"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.371340 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-9cc2-account-create-update-929sf"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.643252 4813 generic.go:334] "Generic (PLEG): container finished" podID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerID="4fd6191a7c9d7ca44261d5fcbd3624b0b03aacc2bc58d7442c99a8e1fd08cf35" exitCode=2 Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.644211 4813 generic.go:334] "Generic (PLEG): container finished" podID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerID="df5d079fd8420cf63379115fc7ecc4fc8197f970aedee4345d297a1c57c4e76b" exitCode=0 Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.644293 4813 generic.go:334] "Generic (PLEG): container finished" podID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerID="ab0e22f4e5943df8e595e547a7021e991a5431b7462f307b7a311effbdd1e948" exitCode=0 Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.643323 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerDied","Data":"4fd6191a7c9d7ca44261d5fcbd3624b0b03aacc2bc58d7442c99a8e1fd08cf35"} Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.644503 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerDied","Data":"df5d079fd8420cf63379115fc7ecc4fc8197f970aedee4345d297a1c57c4e76b"} Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.644651 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerDied","Data":"ab0e22f4e5943df8e595e547a7021e991a5431b7462f307b7a311effbdd1e948"} Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.646742 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1fafa634-d7cd-4e51-868d-3d32fe0df856","Type":"ContainerDied","Data":"15d09ad893569ef9515369bc04fc282719dd6ce1092928fda339910baf1884fb"} Mar 20 16:22:01 
crc kubenswrapper[4813]: I0320 16:22:01.646781 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.646821 4813 scope.go:117] "RemoveContainer" containerID="08170888008c4e9dbd548d86b00d0fefacbf4e4d564d0f4867bd12620fcaa737" Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.648012 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" event={"ID":"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec","Type":"ContainerStarted","Data":"ae58b9d3429df0d0d4cd0d49583b9f80ea0a1b39a0db5b2cf97597df96bd7a12"} Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.677213 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:01 crc kubenswrapper[4813]: I0320 16:22:01.683211 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.340687 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-wlj62"] Mar 20 16:22:02 crc kubenswrapper[4813]: E0320 16:22:02.341104 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fafa634-d7cd-4e51-868d-3d32fe0df856" containerName="watcher-decision-engine" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.341128 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fafa634-d7cd-4e51-868d-3d32fe0df856" containerName="watcher-decision-engine" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.341310 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fafa634-d7cd-4e51-868d-3d32fe0df856" containerName="watcher-decision-engine" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.342003 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.356301 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-wlj62"] Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.365587 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-fb40-account-create-update-dgj58"] Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.366633 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.368594 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.404537 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-fb40-account-create-update-dgj58"] Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.486134 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnpwh\" (UniqueName: \"kubernetes.io/projected/a674e50a-af44-40a0-af3b-b2943207d4be-kube-api-access-mnpwh\") pod \"watcher-fb40-account-create-update-dgj58\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.486343 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqwrx\" (UniqueName: \"kubernetes.io/projected/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-kube-api-access-bqwrx\") pod \"watcher-db-create-wlj62\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.486573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-operator-scripts\") pod \"watcher-db-create-wlj62\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.486668 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a674e50a-af44-40a0-af3b-b2943207d4be-operator-scripts\") pod \"watcher-fb40-account-create-update-dgj58\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.588332 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-operator-scripts\") pod \"watcher-db-create-wlj62\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.588423 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a674e50a-af44-40a0-af3b-b2943207d4be-operator-scripts\") pod \"watcher-fb40-account-create-update-dgj58\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.588465 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnpwh\" (UniqueName: \"kubernetes.io/projected/a674e50a-af44-40a0-af3b-b2943207d4be-kube-api-access-mnpwh\") pod \"watcher-fb40-account-create-update-dgj58\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.588561 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqwrx\" (UniqueName: \"kubernetes.io/projected/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-kube-api-access-bqwrx\") pod \"watcher-db-create-wlj62\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.589394 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-operator-scripts\") pod \"watcher-db-create-wlj62\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.589799 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a674e50a-af44-40a0-af3b-b2943207d4be-operator-scripts\") pod \"watcher-fb40-account-create-update-dgj58\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.613230 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqwrx\" (UniqueName: \"kubernetes.io/projected/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-kube-api-access-bqwrx\") pod \"watcher-db-create-wlj62\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.613277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnpwh\" (UniqueName: \"kubernetes.io/projected/a674e50a-af44-40a0-af3b-b2943207d4be-kube-api-access-mnpwh\") pod \"watcher-fb40-account-create-update-dgj58\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.662572 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:02 crc kubenswrapper[4813]: I0320 16:22:02.692307 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.158323 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-fb40-account-create-update-dgj58"] Mar 20 16:22:03 crc kubenswrapper[4813]: W0320 16:22:03.163782 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda674e50a_af44_40a0_af3b_b2943207d4be.slice/crio-53c5f2d7c176e933f95fd6b3c01337b0527d00df1aed55102ed1decd48a17465 WatchSource:0}: Error finding container 53c5f2d7c176e933f95fd6b3c01337b0527d00df1aed55102ed1decd48a17465: Status 404 returned error can't find the container with id 53c5f2d7c176e933f95fd6b3c01337b0527d00df1aed55102ed1decd48a17465 Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.217343 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-wlj62"] Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.279336 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fafa634-d7cd-4e51-868d-3d32fe0df856" path="/var/lib/kubelet/pods/1fafa634-d7cd-4e51-868d-3d32fe0df856/volumes" Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.280173 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63a2a35d-aaa9-4e42-8c4d-bb20f4095984" path="/var/lib/kubelet/pods/63a2a35d-aaa9-4e42-8c4d-bb20f4095984/volumes" Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.280755 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc6d142f-eb92-4213-93b8-d10527328b9b" path="/var/lib/kubelet/pods/bc6d142f-eb92-4213-93b8-d10527328b9b/volumes" Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.281942 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dae07967-0a0c-4ccb-ac26-32b0a83cc3b4" path="/var/lib/kubelet/pods/dae07967-0a0c-4ccb-ac26-32b0a83cc3b4/volumes" Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.678802 4813 generic.go:334] "Generic (PLEG): container finished" podID="1d18dcc1-59f7-4fc8-95df-e48ff2af539b" containerID="54adb3f5edab8b279a4a0256705cf63f186f5e730a39ddff7fc36897da581103" exitCode=0 Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.678845 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-wlj62" event={"ID":"1d18dcc1-59f7-4fc8-95df-e48ff2af539b","Type":"ContainerDied","Data":"54adb3f5edab8b279a4a0256705cf63f186f5e730a39ddff7fc36897da581103"} Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.678989 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-wlj62" event={"ID":"1d18dcc1-59f7-4fc8-95df-e48ff2af539b","Type":"ContainerStarted","Data":"873772a0dbcde8bd54f9605526b30ff94fcc419b718cea63f915a7f7a2ae6b0c"} Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.681100 4813 generic.go:334] "Generic (PLEG): container finished" podID="a674e50a-af44-40a0-af3b-b2943207d4be" containerID="d04e584f99e8e99b355c27e950835cfefed16595430064e9cb9d277fc31aa0e5" exitCode=0 Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.681138 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" event={"ID":"a674e50a-af44-40a0-af3b-b2943207d4be","Type":"ContainerDied","Data":"d04e584f99e8e99b355c27e950835cfefed16595430064e9cb9d277fc31aa0e5"} Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.681169 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" event={"ID":"a674e50a-af44-40a0-af3b-b2943207d4be","Type":"ContainerStarted","Data":"53c5f2d7c176e933f95fd6b3c01337b0527d00df1aed55102ed1decd48a17465"} Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.682952 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" event={"ID":"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec","Type":"ContainerStarted","Data":"5eaa6587465e7c4640c9411426d0a266673fa422dc272fed1355ae3d3104bcd8"} Mar 20 16:22:03 crc kubenswrapper[4813]: I0320 16:22:03.717148 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" podStartSLOduration=1.463034078 podStartE2EDuration="3.717117774s" podCreationTimestamp="2026-03-20 16:22:00 +0000 UTC" firstStartedPulling="2026-03-20 16:22:01.009873873 +0000 UTC m=+2650.432576714" lastFinishedPulling="2026-03-20 16:22:03.263957569 +0000 UTC m=+2652.686660410" observedRunningTime="2026-03-20 16:22:03.710858945 +0000 UTC m=+2653.133561776" watchObservedRunningTime="2026-03-20 16:22:03.717117774 +0000 UTC m=+2653.139820635" Mar 20 16:22:04 crc kubenswrapper[4813]: I0320 16:22:04.693074 4813 generic.go:334] "Generic (PLEG): container finished" podID="bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec" containerID="5eaa6587465e7c4640c9411426d0a266673fa422dc272fed1355ae3d3104bcd8" exitCode=0 Mar 20 16:22:04 crc kubenswrapper[4813]: I0320 16:22:04.693527 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" event={"ID":"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec","Type":"ContainerDied","Data":"5eaa6587465e7c4640c9411426d0a266673fa422dc272fed1355ae3d3104bcd8"} Mar 20 16:22:04 crc kubenswrapper[4813]: I0320 16:22:04.742894 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:22:04 crc kubenswrapper[4813]: I0320 16:22:04.742947 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.198978 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.203964 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.330955 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-operator-scripts\") pod \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.331018 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqwrx\" (UniqueName: \"kubernetes.io/projected/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-kube-api-access-bqwrx\") pod \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\" (UID: \"1d18dcc1-59f7-4fc8-95df-e48ff2af539b\") " Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.331076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a674e50a-af44-40a0-af3b-b2943207d4be-operator-scripts\") pod \"a674e50a-af44-40a0-af3b-b2943207d4be\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.331127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnpwh\" (UniqueName: \"kubernetes.io/projected/a674e50a-af44-40a0-af3b-b2943207d4be-kube-api-access-mnpwh\") pod \"a674e50a-af44-40a0-af3b-b2943207d4be\" (UID: \"a674e50a-af44-40a0-af3b-b2943207d4be\") " Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.333003 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a674e50a-af44-40a0-af3b-b2943207d4be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a674e50a-af44-40a0-af3b-b2943207d4be" (UID: "a674e50a-af44-40a0-af3b-b2943207d4be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.333198 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d18dcc1-59f7-4fc8-95df-e48ff2af539b" (UID: "1d18dcc1-59f7-4fc8-95df-e48ff2af539b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.337796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-kube-api-access-bqwrx" (OuterVolumeSpecName: "kube-api-access-bqwrx") pod "1d18dcc1-59f7-4fc8-95df-e48ff2af539b" (UID: "1d18dcc1-59f7-4fc8-95df-e48ff2af539b"). InnerVolumeSpecName "kube-api-access-bqwrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.342684 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a674e50a-af44-40a0-af3b-b2943207d4be-kube-api-access-mnpwh" (OuterVolumeSpecName: "kube-api-access-mnpwh") pod "a674e50a-af44-40a0-af3b-b2943207d4be" (UID: "a674e50a-af44-40a0-af3b-b2943207d4be"). InnerVolumeSpecName "kube-api-access-mnpwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.433317 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.433380 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqwrx\" (UniqueName: \"kubernetes.io/projected/1d18dcc1-59f7-4fc8-95df-e48ff2af539b-kube-api-access-bqwrx\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.433393 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a674e50a-af44-40a0-af3b-b2943207d4be-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.433404 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnpwh\" (UniqueName: \"kubernetes.io/projected/a674e50a-af44-40a0-af3b-b2943207d4be-kube-api-access-mnpwh\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.702472 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-wlj62" event={"ID":"1d18dcc1-59f7-4fc8-95df-e48ff2af539b","Type":"ContainerDied","Data":"873772a0dbcde8bd54f9605526b30ff94fcc419b718cea63f915a7f7a2ae6b0c"} Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.702536 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="873772a0dbcde8bd54f9605526b30ff94fcc419b718cea63f915a7f7a2ae6b0c" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.702547 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-wlj62" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.703771 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.703781 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-fb40-account-create-update-dgj58" event={"ID":"a674e50a-af44-40a0-af3b-b2943207d4be","Type":"ContainerDied","Data":"53c5f2d7c176e933f95fd6b3c01337b0527d00df1aed55102ed1decd48a17465"} Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.703830 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53c5f2d7c176e933f95fd6b3c01337b0527d00df1aed55102ed1decd48a17465" Mar 20 16:22:05 crc kubenswrapper[4813]: I0320 16:22:05.805872 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qdk8l" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="registry-server" probeResult="failure" output=< Mar 20 16:22:05 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Mar 20 16:22:05 crc kubenswrapper[4813]: > Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.046639 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.143972 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd6sk\" (UniqueName: \"kubernetes.io/projected/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec-kube-api-access-fd6sk\") pod \"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec\" (UID: \"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec\") " Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.150792 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec-kube-api-access-fd6sk" (OuterVolumeSpecName: "kube-api-access-fd6sk") pod "bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec" (UID: "bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec"). InnerVolumeSpecName "kube-api-access-fd6sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.248774 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd6sk\" (UniqueName: \"kubernetes.io/projected/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec-kube-api-access-fd6sk\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.719558 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" event={"ID":"bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec","Type":"ContainerDied","Data":"ae58b9d3429df0d0d4cd0d49583b9f80ea0a1b39a0db5b2cf97597df96bd7a12"} Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.720669 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae58b9d3429df0d0d4cd0d49583b9f80ea0a1b39a0db5b2cf97597df96bd7a12" Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.719687 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567062-v4l5d" Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.796157 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567056-mzh9b"] Mar 20 16:22:06 crc kubenswrapper[4813]: I0320 16:22:06.811873 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567056-mzh9b"] Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.278954 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0" path="/var/lib/kubelet/pods/fe0d7a75-39a6-4a7e-8ba8-e3e7430b4ab0/volumes" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.693676 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk"] Mar 20 16:22:07 crc kubenswrapper[4813]: E0320 16:22:07.694043 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec" containerName="oc" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694066 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec" containerName="oc" Mar 20 16:22:07 crc kubenswrapper[4813]: E0320 16:22:07.694092 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d18dcc1-59f7-4fc8-95df-e48ff2af539b" containerName="mariadb-database-create" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694102 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d18dcc1-59f7-4fc8-95df-e48ff2af539b" containerName="mariadb-database-create" Mar 20 16:22:07 crc kubenswrapper[4813]: E0320 16:22:07.694120 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a674e50a-af44-40a0-af3b-b2943207d4be" containerName="mariadb-account-create-update" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694128 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a674e50a-af44-40a0-af3b-b2943207d4be" containerName="mariadb-account-create-update" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694300 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a674e50a-af44-40a0-af3b-b2943207d4be" containerName="mariadb-account-create-update" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694321 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d18dcc1-59f7-4fc8-95df-e48ff2af539b" containerName="mariadb-database-create" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694333 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec" containerName="oc" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.694876 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.696959 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-qcrgb" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.697344 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.712181 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk"] Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.772137 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pzlc\" (UniqueName: \"kubernetes.io/projected/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-kube-api-access-8pzlc\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.772285 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.772461 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-db-sync-config-data\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.772773 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-config-data\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.874565 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-config-data\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.874900 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pzlc\" (UniqueName: \"kubernetes.io/projected/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-kube-api-access-8pzlc\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.874941 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc 
kubenswrapper[4813]: I0320 16:22:07.874995 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-db-sync-config-data\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.880024 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-db-sync-config-data\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.886909 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-config-data\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.889539 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pzlc\" (UniqueName: \"kubernetes.io/projected/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-kube-api-access-8pzlc\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:07 crc kubenswrapper[4813]: I0320 16:22:07.892896 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-cd9sk\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:08 crc kubenswrapper[4813]: I0320 16:22:08.031265 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:08 crc kubenswrapper[4813]: I0320 16:22:08.626472 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk"] Mar 20 16:22:08 crc kubenswrapper[4813]: I0320 16:22:08.745474 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" event={"ID":"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb","Type":"ContainerStarted","Data":"06e16c325f09dcbb8f0a09af0829566cbc6130f5b532ff3b1c81bfb9d9b8962a"} Mar 20 16:22:09 crc kubenswrapper[4813]: I0320 16:22:09.769135 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" event={"ID":"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb","Type":"ContainerStarted","Data":"c8201c24790ff7ecc798a8584d9459c973b40619fda3cb355f7141e0dcbf3ddd"} Mar 20 16:22:09 crc kubenswrapper[4813]: I0320 16:22:09.795310 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" podStartSLOduration=2.795290117 podStartE2EDuration="2.795290117s" podCreationTimestamp="2026-03-20 16:22:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:09.787843156 +0000 UTC m=+2659.210545997" watchObservedRunningTime="2026-03-20 16:22:09.795290117 +0000 UTC m=+2659.217992958" Mar 20 16:22:11 crc kubenswrapper[4813]: I0320 16:22:11.789255 4813 generic.go:334] "Generic (PLEG): container finished" podID="b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" containerID="c8201c24790ff7ecc798a8584d9459c973b40619fda3cb355f7141e0dcbf3ddd" exitCode=0 Mar 20 16:22:11 crc kubenswrapper[4813]: I0320 16:22:11.789380 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" event={"ID":"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb","Type":"ContainerDied","Data":"c8201c24790ff7ecc798a8584d9459c973b40619fda3cb355f7141e0dcbf3ddd"} Mar 20 16:22:12 crc kubenswrapper[4813]: I0320 16:22:12.266286 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:22:12 crc kubenswrapper[4813]: E0320 16:22:12.266550 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.181300 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.256010 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-combined-ca-bundle\") pod \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.256131 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pzlc\" (UniqueName: \"kubernetes.io/projected/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-kube-api-access-8pzlc\") pod \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.256189 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-db-sync-config-data\") pod \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.256244 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-config-data\") pod \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\" (UID: \"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb\") " Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.262296 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-kube-api-access-8pzlc" (OuterVolumeSpecName: "kube-api-access-8pzlc") pod "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" (UID: "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb"). InnerVolumeSpecName "kube-api-access-8pzlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.263476 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" (UID: "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.294544 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" (UID: "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.330653 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-config-data" (OuterVolumeSpecName: "config-data") pod "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" (UID: "b1eee3a2-229d-4f45-9b42-24dfe22ba5eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.357935 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.357978 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pzlc\" (UniqueName: \"kubernetes.io/projected/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-kube-api-access-8pzlc\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.357991 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.358005 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.811024 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" event={"ID":"b1eee3a2-229d-4f45-9b42-24dfe22ba5eb","Type":"ContainerDied","Data":"06e16c325f09dcbb8f0a09af0829566cbc6130f5b532ff3b1c81bfb9d9b8962a"} Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.811299 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06e16c325f09dcbb8f0a09af0829566cbc6130f5b532ff3b1c81bfb9d9b8962a" Mar 20 16:22:13 crc kubenswrapper[4813]: I0320 16:22:13.811113 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.086159 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: E0320 16:22:14.087748 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" containerName="watcher-kuttl-db-sync" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.087769 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" containerName="watcher-kuttl-db-sync" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.089039 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" containerName="watcher-kuttl-db-sync" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.105343 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.107473 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.110229 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-qcrgb" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.128436 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.154971 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.156146 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.159902 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.171208 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.171258 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.171382 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-logs\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.171429 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.171501 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.171527 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gtzv\" (UniqueName: \"kubernetes.io/projected/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-kube-api-access-4gtzv\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " 
pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.189583 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.273559 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274296 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274361 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-logs\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274396 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274417 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274438 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gtzv\" (UniqueName: \"kubernetes.io/projected/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-kube-api-access-4gtzv\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274462 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274508 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f84babe6-dcb6-4993-a350-60033d580a9f-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274529 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: 
I0320 16:22:14.274545 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274571 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd484\" (UniqueName: \"kubernetes.io/projected/f84babe6-dcb6-4993-a350-60033d580a9f-kube-api-access-jd484\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.274610 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.275005 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.275036 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-logs\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.280648 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.286087 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.288450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.289280 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.291214 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.291528 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-config-data\") pod \"watcher-kuttl-api-0\" (UID: 
\"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.323578 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gtzv\" (UniqueName: \"kubernetes.io/projected/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-kube-api-access-4gtzv\") pod \"watcher-kuttl-api-0\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376004 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd484\" (UniqueName: \"kubernetes.io/projected/f84babe6-dcb6-4993-a350-60033d580a9f-kube-api-access-jd484\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376077 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376102 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376135 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376165 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmp6l\" (UniqueName: \"kubernetes.io/projected/8efe1f82-92b0-40a0-839b-a44a005c093a-kube-api-access-nmp6l\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376193 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376231 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376255 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376297 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376315 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8efe1f82-92b0-40a0-839b-a44a005c093a-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376336 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f84babe6-dcb6-4993-a350-60033d580a9f-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.376772 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f84babe6-dcb6-4993-a350-60033d580a9f-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.384787 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.386554 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.386749 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.405080 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd484\" (UniqueName: \"kubernetes.io/projected/f84babe6-dcb6-4993-a350-60033d580a9f-kube-api-access-jd484\") pod \"watcher-kuttl-applier-0\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.425775 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.477560 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.477628 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.477662 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmp6l\" (UniqueName: \"kubernetes.io/projected/8efe1f82-92b0-40a0-839b-a44a005c093a-kube-api-access-nmp6l\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.477710 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.477742 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.477788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8efe1f82-92b0-40a0-839b-a44a005c093a-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.478230 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8efe1f82-92b0-40a0-839b-a44a005c093a-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.485562 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.487863 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-cert-memcached-mtls\") pod 
\"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.488006 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.492059 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.495121 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.503068 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmp6l\" (UniqueName: \"kubernetes.io/projected/8efe1f82-92b0-40a0-839b-a44a005c093a-kube-api-access-nmp6l\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.669036 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.804802 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.871614 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:22:14 crc kubenswrapper[4813]: I0320 16:22:14.923525 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:14 crc kubenswrapper[4813]: W0320 16:22:14.932070 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b44c66b_efb4_43f7_b4f6_335c4f8e4dd8.slice/crio-3165025821ed4db806ab59889c51dd2346c91781144b62a246f14ba0c5b4acf5 WatchSource:0}: Error finding container 3165025821ed4db806ab59889c51dd2346c91781144b62a246f14ba0c5b4acf5: Status 404 returned error can't find the container with id 3165025821ed4db806ab59889c51dd2346c91781144b62a246f14ba0c5b4acf5 Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.028026 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:15 crc kubenswrapper[4813]: W0320 16:22:15.031339 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf84babe6_dcb6_4993_a350_60033d580a9f.slice/crio-ca9bc41410d6994fd90e7b4cf4c9e9668197809c369467e65527a755cdfae991 WatchSource:0}: Error finding container ca9bc41410d6994fd90e7b4cf4c9e9668197809c369467e65527a755cdfae991: Status 404 returned error can't find the container with id 
ca9bc41410d6994fd90e7b4cf4c9e9668197809c369467e65527a755cdfae991 Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.171601 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:15 crc kubenswrapper[4813]: W0320 16:22:15.183215 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8efe1f82_92b0_40a0_839b_a44a005c093a.slice/crio-89012956bd31e474087db8ca83751b9826f09b506432bdf5258d6d1a937ccbfb WatchSource:0}: Error finding container 89012956bd31e474087db8ca83751b9826f09b506432bdf5258d6d1a937ccbfb: Status 404 returned error can't find the container with id 89012956bd31e474087db8ca83751b9826f09b506432bdf5258d6d1a937ccbfb Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.842266 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f84babe6-dcb6-4993-a350-60033d580a9f","Type":"ContainerStarted","Data":"14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.842574 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f84babe6-dcb6-4993-a350-60033d580a9f","Type":"ContainerStarted","Data":"ca9bc41410d6994fd90e7b4cf4c9e9668197809c369467e65527a755cdfae991"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.844639 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"8efe1f82-92b0-40a0-839b-a44a005c093a","Type":"ContainerStarted","Data":"15fb21a042f437648d72d3263f576533c324680cefa18990e35e15e4e14caab0"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.844706 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"8efe1f82-92b0-40a0-839b-a44a005c093a","Type":"ContainerStarted","Data":"89012956bd31e474087db8ca83751b9826f09b506432bdf5258d6d1a937ccbfb"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.846331 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8","Type":"ContainerStarted","Data":"627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.846378 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8","Type":"ContainerStarted","Data":"8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.846389 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8","Type":"ContainerStarted","Data":"3165025821ed4db806ab59889c51dd2346c91781144b62a246f14ba0c5b4acf5"} Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.846515 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.870859 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=1.87084128 podStartE2EDuration="1.87084128s" podCreationTimestamp="2026-03-20 16:22:14 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:15.862221747 +0000 UTC m=+2665.284924608" watchObservedRunningTime="2026-03-20 16:22:15.87084128 +0000 UTC m=+2665.293544121" Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.889596 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=1.8895738949999998 podStartE2EDuration="1.889573895s" podCreationTimestamp="2026-03-20 16:22:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:15.879965916 +0000 UTC m=+2665.302668757" watchObservedRunningTime="2026-03-20 16:22:15.889573895 +0000 UTC m=+2665.312276746" Mar 20 16:22:15 crc kubenswrapper[4813]: I0320 16:22:15.900987 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=1.9009714020000001 podStartE2EDuration="1.900971402s" podCreationTimestamp="2026-03-20 16:22:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:15.899432271 +0000 UTC m=+2665.322135112" watchObservedRunningTime="2026-03-20 16:22:15.900971402 +0000 UTC m=+2665.323674233" Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.281556 4813 scope.go:117] "RemoveContainer" containerID="900501ec59493b38a0a70e8c9ca1eeb32d7a6965acd8a5e5160442dd1f325f15" Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.372637 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qdk8l"] Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.373213 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qdk8l" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="registry-server" containerID="cri-o://c149c2cb60306b179b3cb3b66061c813f7c184f29707881f909ad2a3d4d52e91" gracePeriod=2 Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.451779 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.882845 4813 generic.go:334] "Generic (PLEG): container finished" podID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerID="c149c2cb60306b179b3cb3b66061c813f7c184f29707881f909ad2a3d4d52e91" exitCode=0 Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.882926 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerDied","Data":"c149c2cb60306b179b3cb3b66061c813f7c184f29707881f909ad2a3d4d52e91"} Mar 20 16:22:18 crc kubenswrapper[4813]: I0320 16:22:18.981714 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.054954 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-utilities\") pod \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.055422 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-catalog-content\") pod \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.055464 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtzzk\" (UniqueName: \"kubernetes.io/projected/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-kube-api-access-vtzzk\") pod \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\" (UID: \"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd\") " Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.056618 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-utilities" (OuterVolumeSpecName: "utilities") pod "a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" (UID: "a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.059724 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.063999 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-kube-api-access-vtzzk" (OuterVolumeSpecName: "kube-api-access-vtzzk") pod "a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" (UID: "a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd"). InnerVolumeSpecName "kube-api-access-vtzzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.161289 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtzzk\" (UniqueName: \"kubernetes.io/projected/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-kube-api-access-vtzzk\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.241194 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" (UID: "a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.262984 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.426780 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.492830 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.894959 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qdk8l" event={"ID":"a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd","Type":"ContainerDied","Data":"84e870bdc957be090b33c3c817aa33244f1cf33950b05da3fa1d565f72b5bdf9"} Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.895054 4813 scope.go:117] "RemoveContainer" containerID="c149c2cb60306b179b3cb3b66061c813f7c184f29707881f909ad2a3d4d52e91" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.895085 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qdk8l" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.917054 4813 scope.go:117] "RemoveContainer" containerID="e9df6c8377f6a7a284b284e1fe189941c65bde0f79037ddd4f7bf7f0eb0e8d3f" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.920613 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qdk8l"] Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.947754 4813 scope.go:117] "RemoveContainer" containerID="b6e002c6990351ab308564ce6d73240bd0f7ee34abaa814085d2f8fcf5183843" Mar 20 16:22:19 crc kubenswrapper[4813]: I0320 16:22:19.952070 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qdk8l"] Mar 20 16:22:21 crc kubenswrapper[4813]: I0320 16:22:21.274133 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" path="/var/lib/kubelet/pods/a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd/volumes" Mar 20 16:22:23 crc kubenswrapper[4813]: I0320 16:22:23.266163 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:22:23 crc kubenswrapper[4813]: E0320 16:22:23.266547 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.426819 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.431658 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.493190 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.516738 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.670025 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.693938 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.936660 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.943190 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.962556 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:24 crc kubenswrapper[4813]: I0320 16:22:24.962910 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:25 crc kubenswrapper[4813]: I0320 16:22:25.138263 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Mar 20 16:22:30 crc kubenswrapper[4813]: I0320 16:22:30.990146 4813 generic.go:334] "Generic (PLEG): container finished" podID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerID="a655fa348f940d53909e9c0d4e9a3dc5e3e7a623f3810c7160b06d420bf76e4b" exitCode=137 Mar 20 16:22:30 crc kubenswrapper[4813]: I0320 16:22:30.990228 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerDied","Data":"a655fa348f940d53909e9c0d4e9a3dc5e3e7a623f3810c7160b06d420bf76e4b"} Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.054849 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184376 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-run-httpd\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184562 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpr77\" (UniqueName: \"kubernetes.io/projected/4fa1cf5c-db32-4305-95e5-d0e69b581d36-kube-api-access-mpr77\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184604 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-combined-ca-bundle\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184626 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-sg-core-conf-yaml\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184646 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-log-httpd\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184703 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-scripts\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184727 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-ceilometer-tls-certs\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.184754 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-config-data\") pod \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\" (UID: \"4fa1cf5c-db32-4305-95e5-d0e69b581d36\") " Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.190188 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.194391 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.203759 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-scripts" (OuterVolumeSpecName: "scripts") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.207972 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fa1cf5c-db32-4305-95e5-d0e69b581d36-kube-api-access-mpr77" (OuterVolumeSpecName: "kube-api-access-mpr77") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "kube-api-access-mpr77". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.212534 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.235136 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.263623 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.276218 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-config-data" (OuterVolumeSpecName: "config-data") pod "4fa1cf5c-db32-4305-95e5-d0e69b581d36" (UID: "4fa1cf5c-db32-4305-95e5-d0e69b581d36"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.285959 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286104 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286164 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286234 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286296 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpr77\" (UniqueName: \"kubernetes.io/projected/4fa1cf5c-db32-4305-95e5-d0e69b581d36-kube-api-access-mpr77\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286358 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286419 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4fa1cf5c-db32-4305-95e5-d0e69b581d36-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:31 crc kubenswrapper[4813]: I0320 16:22:31.286475 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4fa1cf5c-db32-4305-95e5-d0e69b581d36-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.002096 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4fa1cf5c-db32-4305-95e5-d0e69b581d36","Type":"ContainerDied","Data":"c340b2a18b34704ee7aa474ef1a23c1e266350a32b32a7f1a26e6529eb9228d6"} Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.002456 4813 scope.go:117] "RemoveContainer" containerID="a655fa348f940d53909e9c0d4e9a3dc5e3e7a623f3810c7160b06d420bf76e4b" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.002136 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.023588 4813 scope.go:117] "RemoveContainer" containerID="4fd6191a7c9d7ca44261d5fcbd3624b0b03aacc2bc58d7442c99a8e1fd08cf35" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.037610 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.038073 4813 scope.go:117] "RemoveContainer" containerID="df5d079fd8420cf63379115fc7ecc4fc8197f970aedee4345d297a1c57c4e76b" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.046166 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.056699 4813 scope.go:117] "RemoveContainer" containerID="ab0e22f4e5943df8e595e547a7021e991a5431b7462f307b7a311effbdd1e948" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.069625 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070017 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="registry-server" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070037 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="registry-server" Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070049 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-central-agent" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070058 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-central-agent" Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070074 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="sg-core" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070082 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="sg-core" Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070095 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="proxy-httpd" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070102 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="proxy-httpd" Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070128 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="extract-content" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070136 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="extract-content" Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070150 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="extract-utilities" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070158 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="extract-utilities" Mar 20 16:22:32 crc kubenswrapper[4813]: E0320 16:22:32.070167 4813 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-notification-agent" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070175 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-notification-agent" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070342 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-notification-agent" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070358 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a03e7c6a-6a3b-44b4-bc01-d016d7aa48cd" containerName="registry-server" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070374 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="sg-core" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070388 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="ceilometer-central-agent" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.070404 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" containerName="proxy-httpd" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.072240 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.074642 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.075061 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.077925 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.080049 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098151 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-scripts\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098395 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-run-httpd\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098533 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-log-httpd\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098640 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098716 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5f8n2\" (UniqueName: \"kubernetes.io/projected/2408026e-3bdb-4444-a098-0d7c94c0a4d3-kube-api-access-5f8n2\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098845 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.098926 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.099005 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-config-data\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201032 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-config-data\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201141 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-scripts\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201186 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-run-httpd\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201221 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-log-httpd\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201259 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201288 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5f8n2\" (UniqueName: \"kubernetes.io/projected/2408026e-3bdb-4444-a098-0d7c94c0a4d3-kube-api-access-5f8n2\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201333 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.201357 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.203065 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-log-httpd\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.203086 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-run-httpd\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.207675 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.207874 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-config-data\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.208188 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.208738 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.209373 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-scripts\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.228085 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5f8n2\" (UniqueName: \"kubernetes.io/projected/2408026e-3bdb-4444-a098-0d7c94c0a4d3-kube-api-access-5f8n2\") pod \"ceilometer-0\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.304239 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.310335 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-cd9sk"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.338882 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcherfb40-account-delete-vrfbb"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.339984 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.356199 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcherfb40-account-delete-vrfbb"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.394417 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.403287 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ece4c32-0d94-4394-adb1-7b88134eeae7-operator-scripts\") pod \"watcherfb40-account-delete-vrfbb\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.403629 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fcdj\" (UniqueName: \"kubernetes.io/projected/4ece4c32-0d94-4394-adb1-7b88134eeae7-kube-api-access-7fcdj\") pod \"watcherfb40-account-delete-vrfbb\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.426586 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.426847 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-kuttl-api-log" containerID="cri-o://8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780" gracePeriod=30 Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.427013 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-api" containerID="cri-o://627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1" gracePeriod=30 Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.478584 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.478833 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="8efe1f82-92b0-40a0-839b-a44a005c093a" containerName="watcher-decision-engine" containerID="cri-o://15fb21a042f437648d72d3263f576533c324680cefa18990e35e15e4e14caab0" gracePeriod=30 Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.505463 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fcdj\" (UniqueName: \"kubernetes.io/projected/4ece4c32-0d94-4394-adb1-7b88134eeae7-kube-api-access-7fcdj\") pod \"watcherfb40-account-delete-vrfbb\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.505579 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ece4c32-0d94-4394-adb1-7b88134eeae7-operator-scripts\") pod \"watcherfb40-account-delete-vrfbb\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.506380 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ece4c32-0d94-4394-adb1-7b88134eeae7-operator-scripts\") pod \"watcherfb40-account-delete-vrfbb\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.510236 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.510458 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="f84babe6-dcb6-4993-a350-60033d580a9f" containerName="watcher-applier" containerID="cri-o://14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" gracePeriod=30 Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.533702 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fcdj\" (UniqueName: \"kubernetes.io/projected/4ece4c32-0d94-4394-adb1-7b88134eeae7-kube-api-access-7fcdj\") pod \"watcherfb40-account-delete-vrfbb\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:32 crc kubenswrapper[4813]: I0320 16:22:32.657770 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:33 crc kubenswrapper[4813]: I0320 16:22:33.038475 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:33 crc kubenswrapper[4813]: I0320 16:22:33.083565 4813 generic.go:334] "Generic (PLEG): container finished" podID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerID="8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780" exitCode=143 Mar 20 16:22:33 crc kubenswrapper[4813]: I0320 16:22:33.083608 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8","Type":"ContainerDied","Data":"8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780"} Mar 20 16:22:33 crc kubenswrapper[4813]: I0320 16:22:33.210383 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcherfb40-account-delete-vrfbb"] Mar 20 16:22:33 crc kubenswrapper[4813]: W0320 16:22:33.216929 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ece4c32_0d94_4394_adb1_7b88134eeae7.slice/crio-a5ad2b2278fb4eb54b72254544f2206460884006051f0cf95dbe56ddad0806a7 WatchSource:0}: Error finding container a5ad2b2278fb4eb54b72254544f2206460884006051f0cf95dbe56ddad0806a7: Status 404 returned error can't find the container with id a5ad2b2278fb4eb54b72254544f2206460884006051f0cf95dbe56ddad0806a7 Mar 20 16:22:33 crc kubenswrapper[4813]: I0320 16:22:33.288103 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fa1cf5c-db32-4305-95e5-d0e69b581d36" path="/var/lib/kubelet/pods/4fa1cf5c-db32-4305-95e5-d0e69b581d36/volumes" Mar 20 16:22:33 crc kubenswrapper[4813]: I0320 16:22:33.288959 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1eee3a2-229d-4f45-9b42-24dfe22ba5eb" path="/var/lib/kubelet/pods/b1eee3a2-229d-4f45-9b42-24dfe22ba5eb/volumes" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.092829 4813 generic.go:334] "Generic (PLEG): container finished" podID="4ece4c32-0d94-4394-adb1-7b88134eeae7" containerID="0b1d34bed68049152622c4bfcb1ab980ca1bf99511f3d054ac4ed905e6643db3" exitCode=0 Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.093058 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" event={"ID":"4ece4c32-0d94-4394-adb1-7b88134eeae7","Type":"ContainerDied","Data":"0b1d34bed68049152622c4bfcb1ab980ca1bf99511f3d054ac4ed905e6643db3"} Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.093391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" event={"ID":"4ece4c32-0d94-4394-adb1-7b88134eeae7","Type":"ContainerStarted","Data":"a5ad2b2278fb4eb54b72254544f2206460884006051f0cf95dbe56ddad0806a7"} Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.095340 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerStarted","Data":"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182"} Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.095381 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerStarted","Data":"aceb4e3ac839645c1637cd04047b70b7f59885dc9badd18065c9c486eda881a5"} Mar 20 16:22:34 crc kubenswrapper[4813]: E0320 16:22:34.507968 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:22:34 crc kubenswrapper[4813]: E0320 16:22:34.509672 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:22:34 crc kubenswrapper[4813]: E0320 16:22:34.512600 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:22:34 crc kubenswrapper[4813]: E0320 16:22:34.512651 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="f84babe6-dcb6-4993-a350-60033d580a9f" containerName="watcher-applier" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.631320 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.782114 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-combined-ca-bundle\") pod \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.782890 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-config-data\") pod \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.782977 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-cert-memcached-mtls\") pod \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.783084 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gtzv\" (UniqueName: \"kubernetes.io/projected/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-kube-api-access-4gtzv\") pod \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.783162 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-logs\") pod \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.783199 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-custom-prometheus-ca\") pod \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\" (UID: \"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8\") " Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.786789 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-logs" (OuterVolumeSpecName: "logs") pod "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" (UID: "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.803928 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-kube-api-access-4gtzv" (OuterVolumeSpecName: "kube-api-access-4gtzv") pod "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" (UID: "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8"). InnerVolumeSpecName "kube-api-access-4gtzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.839716 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" (UID: "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.867459 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" (UID: "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.880114 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-config-data" (OuterVolumeSpecName: "config-data") pod "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" (UID: "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.885500 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.885537 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.885553 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.885565 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.885580 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gtzv\" (UniqueName: \"kubernetes.io/projected/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-kube-api-access-4gtzv\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.891783 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" (UID: "7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:34 crc kubenswrapper[4813]: I0320 16:22:34.987695 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.104595 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerStarted","Data":"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015"} Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.106585 4813 generic.go:334] "Generic (PLEG): container finished" podID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerID="627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1" exitCode=0 Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.106631 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.106653 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8","Type":"ContainerDied","Data":"627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1"} Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.106719 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8","Type":"ContainerDied","Data":"3165025821ed4db806ab59889c51dd2346c91781144b62a246f14ba0c5b4acf5"} Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.106738 4813 scope.go:117] "RemoveContainer" containerID="627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.130208 4813 scope.go:117] "RemoveContainer" containerID="8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.137467 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.176124 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.180060 4813 scope.go:117] "RemoveContainer" containerID="627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1" Mar 20 16:22:35 crc kubenswrapper[4813]: E0320 16:22:35.181631 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1\": container with ID starting with 627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1 not found: ID does not exist" containerID="627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.181685 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1"} err="failed to get container status \"627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1\": rpc error: code = NotFound desc = could not find container \"627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1\": container with ID starting with 
627adab19dc0934154793fc2ec983db3e38f8285c8375c93c66a5eac3e9298b1 not found: ID does not exist" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.181711 4813 scope.go:117] "RemoveContainer" containerID="8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780" Mar 20 16:22:35 crc kubenswrapper[4813]: E0320 16:22:35.184724 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780\": container with ID starting with 8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780 not found: ID does not exist" containerID="8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.184760 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780"} err="failed to get container status \"8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780\": rpc error: code = NotFound desc = could not find container \"8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780\": container with ID starting with 8576ad44e1d3119781bf907eac2391ae2e75779d0a02b21da23aa489076c7780 not found: ID does not exist" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.188558 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.274819 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" path="/var/lib/kubelet/pods/7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8/volumes" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.492808 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.596098 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fcdj\" (UniqueName: \"kubernetes.io/projected/4ece4c32-0d94-4394-adb1-7b88134eeae7-kube-api-access-7fcdj\") pod \"4ece4c32-0d94-4394-adb1-7b88134eeae7\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.596149 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ece4c32-0d94-4394-adb1-7b88134eeae7-operator-scripts\") pod \"4ece4c32-0d94-4394-adb1-7b88134eeae7\" (UID: \"4ece4c32-0d94-4394-adb1-7b88134eeae7\") " Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.596806 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ece4c32-0d94-4394-adb1-7b88134eeae7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ece4c32-0d94-4394-adb1-7b88134eeae7" (UID: "4ece4c32-0d94-4394-adb1-7b88134eeae7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.599949 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ece4c32-0d94-4394-adb1-7b88134eeae7-kube-api-access-7fcdj" (OuterVolumeSpecName: "kube-api-access-7fcdj") pod "4ece4c32-0d94-4394-adb1-7b88134eeae7" (UID: "4ece4c32-0d94-4394-adb1-7b88134eeae7"). InnerVolumeSpecName "kube-api-access-7fcdj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.697963 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fcdj\" (UniqueName: \"kubernetes.io/projected/4ece4c32-0d94-4394-adb1-7b88134eeae7-kube-api-access-7fcdj\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:35 crc kubenswrapper[4813]: I0320 16:22:35.698026 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ece4c32-0d94-4394-adb1-7b88134eeae7-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:36 crc kubenswrapper[4813]: I0320 16:22:36.115816 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerStarted","Data":"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2"} Mar 20 16:22:36 crc kubenswrapper[4813]: I0320 16:22:36.118987 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" event={"ID":"4ece4c32-0d94-4394-adb1-7b88134eeae7","Type":"ContainerDied","Data":"a5ad2b2278fb4eb54b72254544f2206460884006051f0cf95dbe56ddad0806a7"} Mar 20 16:22:36 crc kubenswrapper[4813]: I0320 16:22:36.119013 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5ad2b2278fb4eb54b72254544f2206460884006051f0cf95dbe56ddad0806a7" Mar 20 16:22:36 crc kubenswrapper[4813]: I0320 16:22:36.119063 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherfb40-account-delete-vrfbb" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.127745 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.129842 4813 generic.go:334] "Generic (PLEG): container finished" podID="f84babe6-dcb6-4993-a350-60033d580a9f" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" exitCode=0 Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.129879 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f84babe6-dcb6-4993-a350-60033d580a9f","Type":"ContainerDied","Data":"14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b"} Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.129903 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f84babe6-dcb6-4993-a350-60033d580a9f","Type":"ContainerDied","Data":"ca9bc41410d6994fd90e7b4cf4c9e9668197809c369467e65527a755cdfae991"} Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.129919 4813 scope.go:117] "RemoveContainer" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.210706 4813 scope.go:117] "RemoveContainer" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" Mar 20 16:22:37 crc kubenswrapper[4813]: E0320 16:22:37.211048 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b\": container with ID starting with 14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b not found: ID does not exist" containerID="14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.211079 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b"} err="failed to get container status \"14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b\": rpc error: code = NotFound desc = could not find container \"14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b\": container with ID starting with 14775e2b84c2cf531702f674cbda9c64c1e608aaa0a02cdeaa08d69211b20b4b not found: ID does not exist" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.221673 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd484\" (UniqueName: \"kubernetes.io/projected/f84babe6-dcb6-4993-a350-60033d580a9f-kube-api-access-jd484\") pod \"f84babe6-dcb6-4993-a350-60033d580a9f\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.221987 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-combined-ca-bundle\") pod \"f84babe6-dcb6-4993-a350-60033d580a9f\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.222044 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-cert-memcached-mtls\") pod \"f84babe6-dcb6-4993-a350-60033d580a9f\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.222063 4813 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f84babe6-dcb6-4993-a350-60033d580a9f-logs\") pod \"f84babe6-dcb6-4993-a350-60033d580a9f\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.222095 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-config-data\") pod \"f84babe6-dcb6-4993-a350-60033d580a9f\" (UID: \"f84babe6-dcb6-4993-a350-60033d580a9f\") " Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.223215 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f84babe6-dcb6-4993-a350-60033d580a9f-logs" (OuterVolumeSpecName: "logs") pod "f84babe6-dcb6-4993-a350-60033d580a9f" (UID: "f84babe6-dcb6-4993-a350-60033d580a9f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.232961 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f84babe6-dcb6-4993-a350-60033d580a9f-kube-api-access-jd484" (OuterVolumeSpecName: "kube-api-access-jd484") pod "f84babe6-dcb6-4993-a350-60033d580a9f" (UID: "f84babe6-dcb6-4993-a350-60033d580a9f"). InnerVolumeSpecName "kube-api-access-jd484". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.246957 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f84babe6-dcb6-4993-a350-60033d580a9f" (UID: "f84babe6-dcb6-4993-a350-60033d580a9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.266615 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-config-data" (OuterVolumeSpecName: "config-data") pod "f84babe6-dcb6-4993-a350-60033d580a9f" (UID: "f84babe6-dcb6-4993-a350-60033d580a9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.282738 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "f84babe6-dcb6-4993-a350-60033d580a9f" (UID: "f84babe6-dcb6-4993-a350-60033d580a9f"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.325386 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd484\" (UniqueName: \"kubernetes.io/projected/f84babe6-dcb6-4993-a350-60033d580a9f-kube-api-access-jd484\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.325420 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.325432 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.325445 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f84babe6-dcb6-4993-a350-60033d580a9f-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.325456 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f84babe6-dcb6-4993-a350-60033d580a9f-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.367065 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-wlj62"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.381155 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-wlj62"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.391538 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-fb40-account-create-update-dgj58"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.397701 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcherfb40-account-delete-vrfbb"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.404872 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-fb40-account-create-update-dgj58"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.413620 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcherfb40-account-delete-vrfbb"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.480893 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-jmjd8"] Mar 20 16:22:37 crc kubenswrapper[4813]: E0320 16:22:37.481285 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-api" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481308 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-api" Mar 20 16:22:37 crc kubenswrapper[4813]: E0320 16:22:37.481327 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f84babe6-dcb6-4993-a350-60033d580a9f" containerName="watcher-applier" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481337 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f84babe6-dcb6-4993-a350-60033d580a9f" containerName="watcher-applier" Mar 20 16:22:37 crc kubenswrapper[4813]: E0320 16:22:37.481364 4813 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-kuttl-api-log" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481373 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-kuttl-api-log" Mar 20 16:22:37 crc kubenswrapper[4813]: E0320 16:22:37.481393 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ece4c32-0d94-4394-adb1-7b88134eeae7" containerName="mariadb-account-delete" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481402 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ece4c32-0d94-4394-adb1-7b88134eeae7" containerName="mariadb-account-delete" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481590 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-kuttl-api-log" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481614 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ece4c32-0d94-4394-adb1-7b88134eeae7" containerName="mariadb-account-delete" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481634 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f84babe6-dcb6-4993-a350-60033d580a9f" containerName="watcher-applier" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.481644 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-api" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.482189 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.487407 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-jmjd8"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.567563 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-test-account-create-update-8xnqh"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.570077 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.580149 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.581198 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-test-account-create-update-8xnqh"] Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.629261 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzxwm\" (UniqueName: \"kubernetes.io/projected/b5cecfcd-3911-40f8-924a-cd14f75c81ff-kube-api-access-tzxwm\") pod \"watcher-test-account-create-update-8xnqh\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.629358 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/651ff3dc-d168-41b8-8bd9-fd7018a46024-operator-scripts\") pod \"watcher-db-create-jmjd8\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.629382 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gh2d\" (UniqueName: \"kubernetes.io/projected/651ff3dc-d168-41b8-8bd9-fd7018a46024-kube-api-access-7gh2d\") pod \"watcher-db-create-jmjd8\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.629407 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5cecfcd-3911-40f8-924a-cd14f75c81ff-operator-scripts\") pod \"watcher-test-account-create-update-8xnqh\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.731909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/651ff3dc-d168-41b8-8bd9-fd7018a46024-operator-scripts\") pod \"watcher-db-create-jmjd8\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.731966 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gh2d\" (UniqueName: \"kubernetes.io/projected/651ff3dc-d168-41b8-8bd9-fd7018a46024-kube-api-access-7gh2d\") pod \"watcher-db-create-jmjd8\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.731996 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5cecfcd-3911-40f8-924a-cd14f75c81ff-operator-scripts\") pod \"watcher-test-account-create-update-8xnqh\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.732055 4813 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-tzxwm\" (UniqueName: \"kubernetes.io/projected/b5cecfcd-3911-40f8-924a-cd14f75c81ff-kube-api-access-tzxwm\") pod \"watcher-test-account-create-update-8xnqh\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.733099 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/651ff3dc-d168-41b8-8bd9-fd7018a46024-operator-scripts\") pod \"watcher-db-create-jmjd8\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.733327 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5cecfcd-3911-40f8-924a-cd14f75c81ff-operator-scripts\") pod \"watcher-test-account-create-update-8xnqh\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.753990 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzxwm\" (UniqueName: \"kubernetes.io/projected/b5cecfcd-3911-40f8-924a-cd14f75c81ff-kube-api-access-tzxwm\") pod \"watcher-test-account-create-update-8xnqh\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.754455 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gh2d\" (UniqueName: \"kubernetes.io/projected/651ff3dc-d168-41b8-8bd9-fd7018a46024-kube-api-access-7gh2d\") pod \"watcher-db-create-jmjd8\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.798886 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:37 crc kubenswrapper[4813]: I0320 16:22:37.907338 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.152012 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.172732 4813 generic.go:334] "Generic (PLEG): container finished" podID="8efe1f82-92b0-40a0-839b-a44a005c093a" containerID="15fb21a042f437648d72d3263f576533c324680cefa18990e35e15e4e14caab0" exitCode=0 Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.172782 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"8efe1f82-92b0-40a0-839b-a44a005c093a","Type":"ContainerDied","Data":"15fb21a042f437648d72d3263f576533c324680cefa18990e35e15e4e14caab0"} Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.197094 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.203232 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.208796 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerStarted","Data":"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9"} Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.208968 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-central-agent" containerID="cri-o://88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" gracePeriod=30 Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.209240 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.209456 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="proxy-httpd" containerID="cri-o://e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" gracePeriod=30 Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.209515 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="sg-core" containerID="cri-o://264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" gracePeriod=30 Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.209547 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-notification-agent" containerID="cri-o://202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" gracePeriod=30 Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.242792 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.337102513 podStartE2EDuration="6.242776832s" podCreationTimestamp="2026-03-20 16:22:32 +0000 UTC" firstStartedPulling="2026-03-20 16:22:33.077887255 +0000 UTC m=+2682.500590096" lastFinishedPulling="2026-03-20 16:22:36.983561573 +0000 UTC m=+2686.406264415" observedRunningTime="2026-03-20 16:22:38.233921373 +0000 UTC m=+2687.656624214" watchObservedRunningTime="2026-03-20 16:22:38.242776832 +0000 UTC m=+2687.665479673" 
Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.265275 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:22:38 crc kubenswrapper[4813]: E0320 16:22:38.265607 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.316305 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-jmjd8"] Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.389666 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-test-account-create-update-8xnqh"] Mar 20 16:22:38 crc kubenswrapper[4813]: W0320 16:22:38.403814 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5cecfcd_3911_40f8_924a_cd14f75c81ff.slice/crio-f95fee8b8f83aaddcd9e546e17bda46314f049736d4373d45f8bd0d6c6cea2c5 WatchSource:0}: Error finding container f95fee8b8f83aaddcd9e546e17bda46314f049736d4373d45f8bd0d6c6cea2c5: Status 404 returned error can't find the container with id f95fee8b8f83aaddcd9e546e17bda46314f049736d4373d45f8bd0d6c6cea2c5 Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.510079 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.552691 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-cert-memcached-mtls\") pod \"8efe1f82-92b0-40a0-839b-a44a005c093a\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.553048 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmp6l\" (UniqueName: \"kubernetes.io/projected/8efe1f82-92b0-40a0-839b-a44a005c093a-kube-api-access-nmp6l\") pod \"8efe1f82-92b0-40a0-839b-a44a005c093a\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.553080 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-custom-prometheus-ca\") pod \"8efe1f82-92b0-40a0-839b-a44a005c093a\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.553124 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-combined-ca-bundle\") pod \"8efe1f82-92b0-40a0-839b-a44a005c093a\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.553170 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8efe1f82-92b0-40a0-839b-a44a005c093a-logs\") pod \"8efe1f82-92b0-40a0-839b-a44a005c093a\" (UID: 
\"8efe1f82-92b0-40a0-839b-a44a005c093a\") " Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.553196 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-config-data\") pod \"8efe1f82-92b0-40a0-839b-a44a005c093a\" (UID: \"8efe1f82-92b0-40a0-839b-a44a005c093a\") " Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.565171 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8efe1f82-92b0-40a0-839b-a44a005c093a-logs" (OuterVolumeSpecName: "logs") pod "8efe1f82-92b0-40a0-839b-a44a005c093a" (UID: "8efe1f82-92b0-40a0-839b-a44a005c093a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.583662 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efe1f82-92b0-40a0-839b-a44a005c093a-kube-api-access-nmp6l" (OuterVolumeSpecName: "kube-api-access-nmp6l") pod "8efe1f82-92b0-40a0-839b-a44a005c093a" (UID: "8efe1f82-92b0-40a0-839b-a44a005c093a"). InnerVolumeSpecName "kube-api-access-nmp6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.619892 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "8efe1f82-92b0-40a0-839b-a44a005c093a" (UID: "8efe1f82-92b0-40a0-839b-a44a005c093a"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.620745 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8efe1f82-92b0-40a0-839b-a44a005c093a" (UID: "8efe1f82-92b0-40a0-839b-a44a005c093a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.620962 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-config-data" (OuterVolumeSpecName: "config-data") pod "8efe1f82-92b0-40a0-839b-a44a005c093a" (UID: "8efe1f82-92b0-40a0-839b-a44a005c093a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.654655 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmp6l\" (UniqueName: \"kubernetes.io/projected/8efe1f82-92b0-40a0-839b-a44a005c093a-kube-api-access-nmp6l\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.654692 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.654703 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.654716 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8efe1f82-92b0-40a0-839b-a44a005c093a-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.654726 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.690626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "8efe1f82-92b0-40a0-839b-a44a005c093a" (UID: "8efe1f82-92b0-40a0-839b-a44a005c093a"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:38 crc kubenswrapper[4813]: I0320 16:22:38.755891 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/8efe1f82-92b0-40a0-839b-a44a005c093a-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.218658 4813 generic.go:334] "Generic (PLEG): container finished" podID="b5cecfcd-3911-40f8-924a-cd14f75c81ff" containerID="bdcf09e85c4ef150f45b2a4da56f2f230e560fc2426d35e5a7a5579772bfc071" exitCode=0 Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.218758 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" event={"ID":"b5cecfcd-3911-40f8-924a-cd14f75c81ff","Type":"ContainerDied","Data":"bdcf09e85c4ef150f45b2a4da56f2f230e560fc2426d35e5a7a5579772bfc071"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.219065 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" event={"ID":"b5cecfcd-3911-40f8-924a-cd14f75c81ff","Type":"ContainerStarted","Data":"f95fee8b8f83aaddcd9e546e17bda46314f049736d4373d45f8bd0d6c6cea2c5"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.222045 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"8efe1f82-92b0-40a0-839b-a44a005c093a","Type":"ContainerDied","Data":"89012956bd31e474087db8ca83751b9826f09b506432bdf5258d6d1a937ccbfb"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.222065 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.222098 4813 scope.go:117] "RemoveContainer" containerID="15fb21a042f437648d72d3263f576533c324680cefa18990e35e15e4e14caab0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.223043 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.224750 4813 generic.go:334] "Generic (PLEG): container finished" podID="651ff3dc-d168-41b8-8bd9-fd7018a46024" containerID="2ad6eeaa655d9384ff0a27290dae6ae1dc1d68b5c8bf9588b0e8a2898258b885" exitCode=0 Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.224852 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-jmjd8" event={"ID":"651ff3dc-d168-41b8-8bd9-fd7018a46024","Type":"ContainerDied","Data":"2ad6eeaa655d9384ff0a27290dae6ae1dc1d68b5c8bf9588b0e8a2898258b885"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.224894 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-jmjd8" event={"ID":"651ff3dc-d168-41b8-8bd9-fd7018a46024","Type":"ContainerStarted","Data":"4d6d1aad88e194ca082f106114dba62cd940240bc1cf04af7297a19213163fc1"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228624 4813 generic.go:334] "Generic (PLEG): container finished" podID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" exitCode=0 Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228646 4813 generic.go:334] "Generic (PLEG): container finished" podID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" exitCode=2 Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228653 4813 generic.go:334] "Generic (PLEG): container finished" podID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" exitCode=0 Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228660 4813 generic.go:334] "Generic (PLEG): container finished" podID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" exitCode=0 Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228680 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerDied","Data":"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228696 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.228704 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerDied","Data":"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.229058 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerDied","Data":"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.229074 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerDied","Data":"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.229083 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"2408026e-3bdb-4444-a098-0d7c94c0a4d3","Type":"ContainerDied","Data":"aceb4e3ac839645c1637cd04047b70b7f59885dc9badd18065c9c486eda881a5"} Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.257812 4813 scope.go:117] "RemoveContainer" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264039 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-log-httpd\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264079 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-config-data\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264131 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-run-httpd\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264201 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-combined-ca-bundle\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264245 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-ceilometer-tls-certs\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264262 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-scripts\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: 
\"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264298 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5f8n2\" (UniqueName: \"kubernetes.io/projected/2408026e-3bdb-4444-a098-0d7c94c0a4d3-kube-api-access-5f8n2\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264356 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-sg-core-conf-yaml\") pod \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\" (UID: \"2408026e-3bdb-4444-a098-0d7c94c0a4d3\") " Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264419 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264525 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264677 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.264690 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2408026e-3bdb-4444-a098-0d7c94c0a4d3-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.272746 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2408026e-3bdb-4444-a098-0d7c94c0a4d3-kube-api-access-5f8n2" (OuterVolumeSpecName: "kube-api-access-5f8n2") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "kube-api-access-5f8n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.277365 4813 scope.go:117] "RemoveContainer" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.303547 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-scripts" (OuterVolumeSpecName: "scripts") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.321068 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). 
InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.325746 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d18dcc1-59f7-4fc8-95df-e48ff2af539b" path="/var/lib/kubelet/pods/1d18dcc1-59f7-4fc8-95df-e48ff2af539b/volumes" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.326217 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ece4c32-0d94-4394-adb1-7b88134eeae7" path="/var/lib/kubelet/pods/4ece4c32-0d94-4394-adb1-7b88134eeae7/volumes" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.326728 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a674e50a-af44-40a0-af3b-b2943207d4be" path="/var/lib/kubelet/pods/a674e50a-af44-40a0-af3b-b2943207d4be/volumes" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.333658 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f84babe6-dcb6-4993-a350-60033d580a9f" path="/var/lib/kubelet/pods/f84babe6-dcb6-4993-a350-60033d580a9f/volumes" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.336366 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.336399 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.346040 4813 scope.go:117] "RemoveContainer" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.349920 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.366973 4813 scope.go:117] "RemoveContainer" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.368262 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.368508 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.368538 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5f8n2\" (UniqueName: \"kubernetes.io/projected/2408026e-3bdb-4444-a098-0d7c94c0a4d3-kube-api-access-5f8n2\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.368549 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.374217 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.379195 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-config-data" (OuterVolumeSpecName: "config-data") pod "2408026e-3bdb-4444-a098-0d7c94c0a4d3" (UID: "2408026e-3bdb-4444-a098-0d7c94c0a4d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.391687 4813 scope.go:117] "RemoveContainer" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.392122 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": container with ID starting with e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9 not found: ID does not exist" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.392163 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9"} err="failed to get container status \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": rpc error: code = NotFound desc = could not find container \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": container with ID starting with e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.392188 4813 scope.go:117] "RemoveContainer" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.392500 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": container with ID starting with 264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2 not found: ID does not exist" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.392533 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2"} err="failed to get container status \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": rpc error: code = NotFound desc = could not find container \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": container with ID starting with 264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.392558 4813 scope.go:117] "RemoveContainer" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.392877 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": container with ID starting with 202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015 not found: ID does not exist" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.392908 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015"} err="failed to get container status \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": rpc error: code = NotFound desc = could not 
find container \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": container with ID starting with 202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.392924 4813 scope.go:117] "RemoveContainer" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.393351 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": container with ID starting with 88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182 not found: ID does not exist" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.393397 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182"} err="failed to get container status \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": rpc error: code = NotFound desc = could not find container \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": container with ID starting with 88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.393429 4813 scope.go:117] "RemoveContainer" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.393698 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9"} err="failed to get container status \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": rpc error: code = NotFound desc = could not find container \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": container with ID starting with e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.393723 4813 scope.go:117] "RemoveContainer" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.393978 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2"} err="failed to get container status \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": rpc error: code = NotFound desc = could not find container \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": container with ID starting with 264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.394000 4813 scope.go:117] "RemoveContainer" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.394378 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015"} err="failed to get container status \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": rpc error: code = NotFound desc = could not 
find container \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": container with ID starting with 202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.394400 4813 scope.go:117] "RemoveContainer" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.394751 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182"} err="failed to get container status \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": rpc error: code = NotFound desc = could not find container \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": container with ID starting with 88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.394774 4813 scope.go:117] "RemoveContainer" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395122 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9"} err="failed to get container status \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": rpc error: code = NotFound desc = could not find container \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": container with ID starting with e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395140 4813 scope.go:117] "RemoveContainer" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395325 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2"} err="failed to get container status \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": rpc error: code = NotFound desc = could not find container \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": container with ID starting with 264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395343 4813 scope.go:117] "RemoveContainer" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395531 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015"} err="failed to get container status \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": rpc error: code = NotFound desc = could not find container \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": container with ID starting with 202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395554 4813 scope.go:117] "RemoveContainer" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395799 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182"} err="failed to get container status \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": rpc error: code = NotFound desc = could not find container \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": container with ID starting with 88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.395818 4813 scope.go:117] "RemoveContainer" containerID="e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396005 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9"} err="failed to get container status \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": rpc error: code = NotFound desc = could not find container \"e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9\": container with ID starting with e7fb061b222b19bc5c0ef3b64ef6eeadf7864de20d657f29f1b542390d5e7db9 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396026 4813 scope.go:117] "RemoveContainer" containerID="264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396180 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2"} err="failed to get container status \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": rpc error: code = NotFound desc = could not find container \"264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2\": container with ID starting with 264d90b259ad4e0185bd1ff0ab1f464a43e65cb052517977ec64ad355ba8f7c2 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396197 4813 scope.go:117] "RemoveContainer" containerID="202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396368 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015"} err="failed to get container status \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": rpc error: code = NotFound desc = could not find container \"202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015\": container with ID starting with 202e7bfc7a12fb447bc1ec93d28788d94e2090a76b1967a30cc8b8f7f079d015 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396390 4813 scope.go:117] "RemoveContainer" containerID="88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.396579 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182"} err="failed to get container status \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": rpc error: code = NotFound desc = could not find container \"88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182\": container with ID starting with 
88964c30575efebfccf84133858c3d47e094b1e7a52710b2fa8319203a16f182 not found: ID does not exist" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.427018 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.218:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.427133 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="7b44c66b-efb4-43f7-b4f6-335c4f8e4dd8" containerName="watcher-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.218:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.470848 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.470900 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2408026e-3bdb-4444-a098-0d7c94c0a4d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.558621 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.567005 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.587705 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.588099 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-central-agent" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588120 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-central-agent" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.588137 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-notification-agent" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588146 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-notification-agent" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.588161 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="proxy-httpd" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588169 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="proxy-httpd" Mar 20 16:22:39 crc kubenswrapper[4813]: E0320 16:22:39.588198 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8efe1f82-92b0-40a0-839b-a44a005c093a" containerName="watcher-decision-engine" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588205 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8efe1f82-92b0-40a0-839b-a44a005c093a" containerName="watcher-decision-engine" Mar 20 16:22:39 crc 
kubenswrapper[4813]: E0320 16:22:39.588220 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="sg-core" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588228 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="sg-core" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588397 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="proxy-httpd" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588411 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="sg-core" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588428 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-notification-agent" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588444 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" containerName="ceilometer-central-agent" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.588456 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8efe1f82-92b0-40a0-839b-a44a005c093a" containerName="watcher-decision-engine" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.590604 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.594248 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.594322 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.594329 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.601958 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.673383 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.673461 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.673665 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtcqn\" (UniqueName: \"kubernetes.io/projected/29861606-f4eb-4223-8285-f6da9902e378-kube-api-access-jtcqn\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.673734 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.673886 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-config-data\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.673965 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-log-httpd\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.674011 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-scripts\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.674035 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-run-httpd\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775441 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-config-data\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775517 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-log-httpd\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775543 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-scripts\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775566 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-run-httpd\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775609 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775669 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtcqn\" (UniqueName: \"kubernetes.io/projected/29861606-f4eb-4223-8285-f6da9902e378-kube-api-access-jtcqn\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.775726 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.776089 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-log-httpd\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.776162 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-run-httpd\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.779024 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.779226 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-scripts\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.779292 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.780091 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-config-data\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.780931 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.792930 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtcqn\" (UniqueName: \"kubernetes.io/projected/29861606-f4eb-4223-8285-f6da9902e378-kube-api-access-jtcqn\") pod \"ceilometer-0\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:39 crc kubenswrapper[4813]: I0320 16:22:39.908013 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.380907 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.678337 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.687080 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.796275 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gh2d\" (UniqueName: \"kubernetes.io/projected/651ff3dc-d168-41b8-8bd9-fd7018a46024-kube-api-access-7gh2d\") pod \"651ff3dc-d168-41b8-8bd9-fd7018a46024\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.796345 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzxwm\" (UniqueName: \"kubernetes.io/projected/b5cecfcd-3911-40f8-924a-cd14f75c81ff-kube-api-access-tzxwm\") pod \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.796470 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5cecfcd-3911-40f8-924a-cd14f75c81ff-operator-scripts\") pod \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\" (UID: \"b5cecfcd-3911-40f8-924a-cd14f75c81ff\") " Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.796533 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/651ff3dc-d168-41b8-8bd9-fd7018a46024-operator-scripts\") pod \"651ff3dc-d168-41b8-8bd9-fd7018a46024\" (UID: \"651ff3dc-d168-41b8-8bd9-fd7018a46024\") " Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.797760 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5cecfcd-3911-40f8-924a-cd14f75c81ff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b5cecfcd-3911-40f8-924a-cd14f75c81ff" (UID: "b5cecfcd-3911-40f8-924a-cd14f75c81ff"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.798568 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/651ff3dc-d168-41b8-8bd9-fd7018a46024-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "651ff3dc-d168-41b8-8bd9-fd7018a46024" (UID: "651ff3dc-d168-41b8-8bd9-fd7018a46024"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.805690 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/651ff3dc-d168-41b8-8bd9-fd7018a46024-kube-api-access-7gh2d" (OuterVolumeSpecName: "kube-api-access-7gh2d") pod "651ff3dc-d168-41b8-8bd9-fd7018a46024" (UID: "651ff3dc-d168-41b8-8bd9-fd7018a46024"). InnerVolumeSpecName "kube-api-access-7gh2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.809726 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5cecfcd-3911-40f8-924a-cd14f75c81ff-kube-api-access-tzxwm" (OuterVolumeSpecName: "kube-api-access-tzxwm") pod "b5cecfcd-3911-40f8-924a-cd14f75c81ff" (UID: "b5cecfcd-3911-40f8-924a-cd14f75c81ff"). InnerVolumeSpecName "kube-api-access-tzxwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.898828 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5cecfcd-3911-40f8-924a-cd14f75c81ff-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.899174 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/651ff3dc-d168-41b8-8bd9-fd7018a46024-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.899188 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gh2d\" (UniqueName: \"kubernetes.io/projected/651ff3dc-d168-41b8-8bd9-fd7018a46024-kube-api-access-7gh2d\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:40 crc kubenswrapper[4813]: I0320 16:22:40.899244 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzxwm\" (UniqueName: \"kubernetes.io/projected/b5cecfcd-3911-40f8-924a-cd14f75c81ff-kube-api-access-tzxwm\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.253547 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-jmjd8" event={"ID":"651ff3dc-d168-41b8-8bd9-fd7018a46024","Type":"ContainerDied","Data":"4d6d1aad88e194ca082f106114dba62cd940240bc1cf04af7297a19213163fc1"} Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.253595 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d6d1aad88e194ca082f106114dba62cd940240bc1cf04af7297a19213163fc1" Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.253653 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-jmjd8" Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.259103 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerStarted","Data":"86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e"} Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.259169 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerStarted","Data":"7a0bb5999b31a2ff1f84bff633ae197da802a0e6bbb0fa8c0f659e03d5463935"} Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.261241 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" event={"ID":"b5cecfcd-3911-40f8-924a-cd14f75c81ff","Type":"ContainerDied","Data":"f95fee8b8f83aaddcd9e546e17bda46314f049736d4373d45f8bd0d6c6cea2c5"} Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.261301 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f95fee8b8f83aaddcd9e546e17bda46314f049736d4373d45f8bd0d6c6cea2c5" Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.261372 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-test-account-create-update-8xnqh" Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.282993 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2408026e-3bdb-4444-a098-0d7c94c0a4d3" path="/var/lib/kubelet/pods/2408026e-3bdb-4444-a098-0d7c94c0a4d3/volumes" Mar 20 16:22:41 crc kubenswrapper[4813]: I0320 16:22:41.284096 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8efe1f82-92b0-40a0-839b-a44a005c093a" path="/var/lib/kubelet/pods/8efe1f82-92b0-40a0-839b-a44a005c093a/volumes" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.270326 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerStarted","Data":"04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283"} Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.270830 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerStarted","Data":"1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39"} Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.897960 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl"] Mar 20 16:22:42 crc kubenswrapper[4813]: E0320 16:22:42.898322 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="651ff3dc-d168-41b8-8bd9-fd7018a46024" containerName="mariadb-database-create" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.898340 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="651ff3dc-d168-41b8-8bd9-fd7018a46024" containerName="mariadb-database-create" Mar 20 16:22:42 crc kubenswrapper[4813]: E0320 16:22:42.898360 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5cecfcd-3911-40f8-924a-cd14f75c81ff" containerName="mariadb-account-create-update" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.898367 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5cecfcd-3911-40f8-924a-cd14f75c81ff" 
containerName="mariadb-account-create-update" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.898523 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5cecfcd-3911-40f8-924a-cd14f75c81ff" containerName="mariadb-account-create-update" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.898541 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="651ff3dc-d168-41b8-8bd9-fd7018a46024" containerName="mariadb-database-create" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.899053 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.903038 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-mmlpj" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.903258 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:22:42 crc kubenswrapper[4813]: I0320 16:22:42.906877 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl"] Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.042819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.042889 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrslm\" (UniqueName: \"kubernetes.io/projected/b0ee1311-0844-4018-81dc-0b0fed02378c-kube-api-access-nrslm\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.042968 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-db-sync-config-data\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.042993 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-config-data\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.144862 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrslm\" (UniqueName: \"kubernetes.io/projected/b0ee1311-0844-4018-81dc-0b0fed02378c-kube-api-access-nrslm\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.144947 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-db-sync-config-data\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.144974 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-config-data\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.145131 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.149455 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.150310 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-db-sync-config-data\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.164572 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-config-data\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.164804 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrslm\" (UniqueName: \"kubernetes.io/projected/b0ee1311-0844-4018-81dc-0b0fed02378c-kube-api-access-nrslm\") pod \"watcher-kuttl-db-sync-z5pzl\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.215176 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:43 crc kubenswrapper[4813]: I0320 16:22:43.735573 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl"] Mar 20 16:22:44 crc kubenswrapper[4813]: I0320 16:22:44.288020 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerStarted","Data":"8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f"} Mar 20 16:22:44 crc kubenswrapper[4813]: I0320 16:22:44.288411 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:22:44 crc kubenswrapper[4813]: I0320 16:22:44.289529 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" event={"ID":"b0ee1311-0844-4018-81dc-0b0fed02378c","Type":"ContainerStarted","Data":"2572dd65a89aca93eb1ecec3947667d411cca61d300b1942b0d0d8ccc966f394"} Mar 20 16:22:44 crc kubenswrapper[4813]: I0320 16:22:44.289561 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" event={"ID":"b0ee1311-0844-4018-81dc-0b0fed02378c","Type":"ContainerStarted","Data":"88cf1fbd1e997170af5e4a14df5b5145cf65b509d601df7de7e9195a86163088"} Mar 20 16:22:44 crc kubenswrapper[4813]: I0320 16:22:44.319580 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.829701366 podStartE2EDuration="5.319557008s" podCreationTimestamp="2026-03-20 16:22:39 +0000 UTC" firstStartedPulling="2026-03-20 16:22:40.396597323 +0000 UTC m=+2689.819300164" lastFinishedPulling="2026-03-20 16:22:43.886452965 +0000 UTC m=+2693.309155806" observedRunningTime="2026-03-20 16:22:44.313772042 +0000 UTC m=+2693.736474903" watchObservedRunningTime="2026-03-20 16:22:44.319557008 +0000 UTC m=+2693.742259849" Mar 20 16:22:44 crc kubenswrapper[4813]: I0320 16:22:44.329732 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" podStartSLOduration=2.329713212 podStartE2EDuration="2.329713212s" podCreationTimestamp="2026-03-20 16:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:44.326544187 +0000 UTC m=+2693.749247028" watchObservedRunningTime="2026-03-20 16:22:44.329713212 +0000 UTC m=+2693.752416053" Mar 20 16:22:46 crc kubenswrapper[4813]: I0320 16:22:46.309000 4813 generic.go:334] "Generic (PLEG): container finished" podID="b0ee1311-0844-4018-81dc-0b0fed02378c" containerID="2572dd65a89aca93eb1ecec3947667d411cca61d300b1942b0d0d8ccc966f394" exitCode=0 Mar 20 16:22:46 crc kubenswrapper[4813]: I0320 16:22:46.309093 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" event={"ID":"b0ee1311-0844-4018-81dc-0b0fed02378c","Type":"ContainerDied","Data":"2572dd65a89aca93eb1ecec3947667d411cca61d300b1942b0d0d8ccc966f394"} Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.699334 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.830217 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-combined-ca-bundle\") pod \"b0ee1311-0844-4018-81dc-0b0fed02378c\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.830316 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-config-data\") pod \"b0ee1311-0844-4018-81dc-0b0fed02378c\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.830347 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrslm\" (UniqueName: \"kubernetes.io/projected/b0ee1311-0844-4018-81dc-0b0fed02378c-kube-api-access-nrslm\") pod \"b0ee1311-0844-4018-81dc-0b0fed02378c\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.830996 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-db-sync-config-data\") pod \"b0ee1311-0844-4018-81dc-0b0fed02378c\" (UID: \"b0ee1311-0844-4018-81dc-0b0fed02378c\") " Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.835774 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0ee1311-0844-4018-81dc-0b0fed02378c-kube-api-access-nrslm" (OuterVolumeSpecName: "kube-api-access-nrslm") pod "b0ee1311-0844-4018-81dc-0b0fed02378c" (UID: "b0ee1311-0844-4018-81dc-0b0fed02378c"). InnerVolumeSpecName "kube-api-access-nrslm". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.850810 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b0ee1311-0844-4018-81dc-0b0fed02378c" (UID: "b0ee1311-0844-4018-81dc-0b0fed02378c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.854588 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0ee1311-0844-4018-81dc-0b0fed02378c" (UID: "b0ee1311-0844-4018-81dc-0b0fed02378c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.899794 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-config-data" (OuterVolumeSpecName: "config-data") pod "b0ee1311-0844-4018-81dc-0b0fed02378c" (UID: "b0ee1311-0844-4018-81dc-0b0fed02378c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.933661 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.933690 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.933699 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrslm\" (UniqueName: \"kubernetes.io/projected/b0ee1311-0844-4018-81dc-0b0fed02378c-kube-api-access-nrslm\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:47 crc kubenswrapper[4813]: I0320 16:22:47.933711 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0ee1311-0844-4018-81dc-0b0fed02378c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:22:48 crc kubenswrapper[4813]: I0320 16:22:48.329004 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" event={"ID":"b0ee1311-0844-4018-81dc-0b0fed02378c","Type":"ContainerDied","Data":"88cf1fbd1e997170af5e4a14df5b5145cf65b509d601df7de7e9195a86163088"} Mar 20 16:22:48 crc kubenswrapper[4813]: I0320 16:22:48.329041 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88cf1fbd1e997170af5e4a14df5b5145cf65b509d601df7de7e9195a86163088" Mar 20 16:22:48 crc kubenswrapper[4813]: I0320 16:22:48.329096 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.013353 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:49 crc kubenswrapper[4813]: E0320 16:22:49.013718 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ee1311-0844-4018-81dc-0b0fed02378c" containerName="watcher-kuttl-db-sync" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.013731 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ee1311-0844-4018-81dc-0b0fed02378c" containerName="watcher-kuttl-db-sync" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.013897 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0ee1311-0844-4018-81dc-0b0fed02378c" containerName="watcher-kuttl-db-sync" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.014750 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.018724 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.018932 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-mmlpj" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.025043 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.027912 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.037385 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.062523 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.117509 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.119514 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.121239 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.128025 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162009 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg6w2\" (UniqueName: \"kubernetes.io/projected/7b938e62-0adc-4638-92a4-9d598276519e-kube-api-access-bg6w2\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162070 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162105 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-custom-prometheus-ca\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162131 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dff83d43-1e8c-4996-a4bd-93def70f4050-logs\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162147 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162164 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbkht\" (UniqueName: \"kubernetes.io/projected/dff83d43-1e8c-4996-a4bd-93def70f4050-kube-api-access-vbkht\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " 
pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162186 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-cert-memcached-mtls\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162208 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-combined-ca-bundle\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162226 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162253 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162291 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-config-data\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.162316 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b938e62-0adc-4638-92a4-9d598276519e-logs\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.182078 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.183112 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.188484 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.190146 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.263839 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.263885 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-custom-prometheus-ca\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.263951 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dff83d43-1e8c-4996-a4bd-93def70f4050-logs\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.263972 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264661 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbkht\" (UniqueName: \"kubernetes.io/projected/dff83d43-1e8c-4996-a4bd-93def70f4050-kube-api-access-vbkht\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264694 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264732 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264769 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-cert-memcached-mtls\") pod \"watcher-kuttl-api-1\" (UID: 
\"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264794 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264833 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-combined-ca-bundle\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264859 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264886 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264910 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264949 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pdz9\" (UniqueName: \"kubernetes.io/projected/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-kube-api-access-4pdz9\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264976 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.265021 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.265050 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.265088 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.265130 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-config-data\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.265234 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b938e62-0adc-4638-92a4-9d598276519e-logs\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.264679 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dff83d43-1e8c-4996-a4bd-93def70f4050-logs\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.265332 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.266436 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:22:49 crc kubenswrapper[4813]: E0320 16:22:49.266773 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.266880 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg6w2\" (UniqueName: \"kubernetes.io/projected/7b938e62-0adc-4638-92a4-9d598276519e-kube-api-access-bg6w2\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.267610 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdwmv\" (UniqueName: \"kubernetes.io/projected/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-kube-api-access-kdwmv\") pod 
\"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.268396 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-combined-ca-bundle\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.268555 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.268637 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b938e62-0adc-4638-92a4-9d598276519e-logs\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.270092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-custom-prometheus-ca\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.271718 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-cert-memcached-mtls\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.272455 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.272790 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.272881 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-config-data\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.273721 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc 
kubenswrapper[4813]: I0320 16:22:49.284907 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbkht\" (UniqueName: \"kubernetes.io/projected/dff83d43-1e8c-4996-a4bd-93def70f4050-kube-api-access-vbkht\") pod \"watcher-kuttl-api-0\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.288824 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg6w2\" (UniqueName: \"kubernetes.io/projected/7b938e62-0adc-4638-92a4-9d598276519e-kube-api-access-bg6w2\") pod \"watcher-kuttl-api-1\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.334593 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.361713 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.370823 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.370873 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.370894 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.370973 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.370991 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.371034 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pdz9\" (UniqueName: \"kubernetes.io/projected/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-kube-api-access-4pdz9\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: 
I0320 16:22:49.371091 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.371107 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.371154 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.371213 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.371270 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdwmv\" (UniqueName: \"kubernetes.io/projected/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-kube-api-access-kdwmv\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.371954 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.375226 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.376464 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.378000 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 
16:22:49.378081 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.378911 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.378968 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.380169 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.381389 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.393277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdwmv\" (UniqueName: \"kubernetes.io/projected/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-kube-api-access-kdwmv\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.394681 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pdz9\" (UniqueName: \"kubernetes.io/projected/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-kube-api-access-4pdz9\") pod \"watcher-kuttl-applier-0\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.442731 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.502168 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:49 crc kubenswrapper[4813]: I0320 16:22:49.927884 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:22:49 crc kubenswrapper[4813]: W0320 16:22:49.938857 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b938e62_0adc_4638_92a4_9d598276519e.slice/crio-8b3803a772be131dc3cb0dd3dde4ebea8491fa71f2a2c188ce823203d9c3bac8 WatchSource:0}: Error finding container 8b3803a772be131dc3cb0dd3dde4ebea8491fa71f2a2c188ce823203d9c3bac8: Status 404 returned error can't find the container with id 8b3803a772be131dc3cb0dd3dde4ebea8491fa71f2a2c188ce823203d9c3bac8 Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:50.098314 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:50.337112 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:22:51 crc kubenswrapper[4813]: W0320 16:22:50.344289 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ec4b12c_b8ba_42dc_aaf4_7785c8b51ddd.slice/crio-091a2bebcf10afcdd9e8f7327454ee7b704a093c32c074aa2efb009fffcf3cb2 WatchSource:0}: Error finding container 091a2bebcf10afcdd9e8f7327454ee7b704a093c32c074aa2efb009fffcf3cb2: Status 404 returned error can't find the container with id 091a2bebcf10afcdd9e8f7327454ee7b704a093c32c074aa2efb009fffcf3cb2 Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:50.352506 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:50.377737 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"7b938e62-0adc-4638-92a4-9d598276519e","Type":"ContainerStarted","Data":"a3d12b190d92bca44bdbe28a99fc0f0cd8d57b22c7870597ac29802b012398c2"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:50.377771 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"7b938e62-0adc-4638-92a4-9d598276519e","Type":"ContainerStarted","Data":"8b3803a772be131dc3cb0dd3dde4ebea8491fa71f2a2c188ce823203d9c3bac8"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:50.379430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4","Type":"ContainerStarted","Data":"7f5f1e2537ad957f238d23996d6d874b33aee1b9d5111ad4d8ac65fa8f6081b7"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.396134 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"7b938e62-0adc-4638-92a4-9d598276519e","Type":"ContainerStarted","Data":"8ba2203bc71407c6cb111a72cea47314603cca28db8bb7676bd5186d09e1c375"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.396556 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.401957 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" 
event={"ID":"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4","Type":"ContainerStarted","Data":"1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.409080 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd","Type":"ContainerStarted","Data":"ea94c04a30825396ef85c275c19b9e726c1c3df422495938aa8562d54748ccd2"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.409125 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd","Type":"ContainerStarted","Data":"091a2bebcf10afcdd9e8f7327454ee7b704a093c32c074aa2efb009fffcf3cb2"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.419926 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"dff83d43-1e8c-4996-a4bd-93def70f4050","Type":"ContainerStarted","Data":"74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.419986 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"dff83d43-1e8c-4996-a4bd-93def70f4050","Type":"ContainerStarted","Data":"7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.420021 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"dff83d43-1e8c-4996-a4bd-93def70f4050","Type":"ContainerStarted","Data":"ab3f2ad51cd697d90968c2dec938cb310a4786d9ed275c4a5a1e779148732f52"} Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.420971 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.438105 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.438082206 podStartE2EDuration="2.438082206s" podCreationTimestamp="2026-03-20 16:22:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:51.437793529 +0000 UTC m=+2700.860496370" watchObservedRunningTime="2026-03-20 16:22:51.438082206 +0000 UTC m=+2700.860785047" Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.442161 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-1" podStartSLOduration=2.442135166 podStartE2EDuration="2.442135166s" podCreationTimestamp="2026-03-20 16:22:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:51.419222638 +0000 UTC m=+2700.841925479" watchObservedRunningTime="2026-03-20 16:22:51.442135166 +0000 UTC m=+2700.864838007" Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.454278 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=2.454259863 podStartE2EDuration="2.454259863s" podCreationTimestamp="2026-03-20 16:22:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 
16:22:51.452846015 +0000 UTC m=+2700.875548866" watchObservedRunningTime="2026-03-20 16:22:51.454259863 +0000 UTC m=+2700.876962704" Mar 20 16:22:51 crc kubenswrapper[4813]: I0320 16:22:51.472655 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=3.472634678 podStartE2EDuration="3.472634678s" podCreationTimestamp="2026-03-20 16:22:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:22:51.472106574 +0000 UTC m=+2700.894809415" watchObservedRunningTime="2026-03-20 16:22:51.472634678 +0000 UTC m=+2700.895337539" Mar 20 16:22:53 crc kubenswrapper[4813]: I0320 16:22:53.783624 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:53 crc kubenswrapper[4813]: I0320 16:22:53.862870 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:54 crc kubenswrapper[4813]: I0320 16:22:54.335665 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:54 crc kubenswrapper[4813]: I0320 16:22:54.362124 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:54 crc kubenswrapper[4813]: I0320 16:22:54.443578 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.335016 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.343212 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.362170 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.366808 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.443747 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.474764 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.503425 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.507015 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.510112 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 16:22:59.534591 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:22:59 crc kubenswrapper[4813]: I0320 
16:22:59.550721 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.129217 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8"] Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.130680 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.134175 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-scripts" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.138550 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8"] Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.139886 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.281665 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-config-data\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.281717 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdj5t\" (UniqueName: \"kubernetes.io/projected/f30f5314-3d20-4d91-9653-60096c44febf-kube-api-access-pdj5t\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.281749 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts-volume\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-scripts-volume\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.281776 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-combined-ca-bundle\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.383556 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-config-data\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.383606 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdj5t\" (UniqueName: \"kubernetes.io/projected/f30f5314-3d20-4d91-9653-60096c44febf-kube-api-access-pdj5t\") pod 
\"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.383641 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts-volume\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-scripts-volume\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.383663 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-combined-ca-bundle\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.397672 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-combined-ca-bundle\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.399382 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-config-data\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.399754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts-volume\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-scripts-volume\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.405182 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdj5t\" (UniqueName: \"kubernetes.io/projected/f30f5314-3d20-4d91-9653-60096c44febf-kube-api-access-pdj5t\") pod \"watcher-kuttl-db-purge-29567063-nr2p8\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.448898 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.505062 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.527586 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:00 crc kubenswrapper[4813]: I0320 16:23:00.911528 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8"] Mar 20 16:23:00 crc kubenswrapper[4813]: W0320 16:23:00.916632 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf30f5314_3d20_4d91_9653_60096c44febf.slice/crio-85aa5d2721e3303f048638e7d3cc7a35cce6da29e91d5aef97dd9de8a9730090 WatchSource:0}: Error finding container 85aa5d2721e3303f048638e7d3cc7a35cce6da29e91d5aef97dd9de8a9730090: Status 404 returned error can't find the container with id 85aa5d2721e3303f048638e7d3cc7a35cce6da29e91d5aef97dd9de8a9730090 Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.271947 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:23:01 crc kubenswrapper[4813]: E0320 16:23:01.272414 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.516557 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" event={"ID":"f30f5314-3d20-4d91-9653-60096c44febf","Type":"ContainerStarted","Data":"4ce0f2637c7fb74b111fe0fc69ee6d91846b8fcd129a98dc40b76a1f83655f4c"} Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.516599 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" event={"ID":"f30f5314-3d20-4d91-9653-60096c44febf","Type":"ContainerStarted","Data":"85aa5d2721e3303f048638e7d3cc7a35cce6da29e91d5aef97dd9de8a9730090"} Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.535547 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" podStartSLOduration=1.5355241830000002 podStartE2EDuration="1.535524183s" podCreationTimestamp="2026-03-20 16:23:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:23:01.532211113 +0000 UTC m=+2710.954913954" watchObservedRunningTime="2026-03-20 16:23:01.535524183 +0000 UTC m=+2710.958227024" Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.739301 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.739649 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="29861606-f4eb-4223-8285-f6da9902e378" 
containerName="ceilometer-central-agent" containerID="cri-o://86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e" gracePeriod=30 Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.739729 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="proxy-httpd" containerID="cri-o://8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f" gracePeriod=30 Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.739738 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="ceilometer-notification-agent" containerID="cri-o://1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39" gracePeriod=30 Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.739729 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="sg-core" containerID="cri-o://04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283" gracePeriod=30 Mar 20 16:23:01 crc kubenswrapper[4813]: I0320 16:23:01.775701 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Mar 20 16:23:02 crc kubenswrapper[4813]: I0320 16:23:02.529700 4813 generic.go:334] "Generic (PLEG): container finished" podID="29861606-f4eb-4223-8285-f6da9902e378" containerID="8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f" exitCode=0 Mar 20 16:23:02 crc kubenswrapper[4813]: I0320 16:23:02.529732 4813 generic.go:334] "Generic (PLEG): container finished" podID="29861606-f4eb-4223-8285-f6da9902e378" containerID="04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283" exitCode=2 Mar 20 16:23:02 crc kubenswrapper[4813]: I0320 16:23:02.529743 4813 generic.go:334] "Generic (PLEG): container finished" podID="29861606-f4eb-4223-8285-f6da9902e378" containerID="86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e" exitCode=0 Mar 20 16:23:02 crc kubenswrapper[4813]: I0320 16:23:02.529827 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerDied","Data":"8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f"} Mar 20 16:23:02 crc kubenswrapper[4813]: I0320 16:23:02.529859 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerDied","Data":"04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283"} Mar 20 16:23:02 crc kubenswrapper[4813]: I0320 16:23:02.529871 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerDied","Data":"86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e"} Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.425961 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.532501 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-ceilometer-tls-certs\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533221 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-combined-ca-bundle\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533361 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-sg-core-conf-yaml\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533458 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtcqn\" (UniqueName: \"kubernetes.io/projected/29861606-f4eb-4223-8285-f6da9902e378-kube-api-access-jtcqn\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533547 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-config-data\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-scripts\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533733 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-log-httpd\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.533838 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-run-httpd\") pod \"29861606-f4eb-4223-8285-f6da9902e378\" (UID: \"29861606-f4eb-4223-8285-f6da9902e378\") " Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.535025 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.536797 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.539580 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-scripts" (OuterVolumeSpecName: "scripts") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.542635 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29861606-f4eb-4223-8285-f6da9902e378-kube-api-access-jtcqn" (OuterVolumeSpecName: "kube-api-access-jtcqn") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "kube-api-access-jtcqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.558340 4813 generic.go:334] "Generic (PLEG): container finished" podID="29861606-f4eb-4223-8285-f6da9902e378" containerID="1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39" exitCode=0 Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.558385 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerDied","Data":"1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39"} Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.558415 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"29861606-f4eb-4223-8285-f6da9902e378","Type":"ContainerDied","Data":"7a0bb5999b31a2ff1f84bff633ae197da802a0e6bbb0fa8c0f659e03d5463935"} Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.558432 4813 scope.go:117] "RemoveContainer" containerID="8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.558460 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.562408 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.588451 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.604676 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.631778 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-config-data" (OuterVolumeSpecName: "config-data") pod "29861606-f4eb-4223-8285-f6da9902e378" (UID: "29861606-f4eb-4223-8285-f6da9902e378"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.637946 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.637976 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.637985 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.637994 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtcqn\" (UniqueName: \"kubernetes.io/projected/29861606-f4eb-4223-8285-f6da9902e378-kube-api-access-jtcqn\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.638004 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.638012 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29861606-f4eb-4223-8285-f6da9902e378-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.638022 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.638031 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29861606-f4eb-4223-8285-f6da9902e378-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.688949 4813 scope.go:117] "RemoveContainer" containerID="04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.705253 4813 scope.go:117] "RemoveContainer" containerID="1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.734209 4813 scope.go:117] "RemoveContainer" 
containerID="86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.754812 4813 scope.go:117] "RemoveContainer" containerID="8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.755237 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f\": container with ID starting with 8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f not found: ID does not exist" containerID="8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.755282 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f"} err="failed to get container status \"8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f\": rpc error: code = NotFound desc = could not find container \"8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f\": container with ID starting with 8651c67fad3d21a6b4e7c80efe8e1a0ae8163e45d57f2c6d973250902854ea9f not found: ID does not exist" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.755307 4813 scope.go:117] "RemoveContainer" containerID="04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.755771 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283\": container with ID starting with 04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283 not found: ID does not exist" containerID="04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.755810 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283"} err="failed to get container status \"04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283\": rpc error: code = NotFound desc = could not find container \"04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283\": container with ID starting with 04d5685018671c45f908061fd89dc1435c0e83fe9b18ea2d65c902ae4a6bc283 not found: ID does not exist" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.755836 4813 scope.go:117] "RemoveContainer" containerID="1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.756101 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39\": container with ID starting with 1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39 not found: ID does not exist" containerID="1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.756125 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39"} err="failed to get container status \"1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39\": rpc error: code = 
NotFound desc = could not find container \"1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39\": container with ID starting with 1889c6b86589b4d17a173ecd2a166466382a9429cbf5d6ceb9d1cc9f97ed3a39 not found: ID does not exist" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.756138 4813 scope.go:117] "RemoveContainer" containerID="86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.756370 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e\": container with ID starting with 86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e not found: ID does not exist" containerID="86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.756397 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e"} err="failed to get container status \"86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e\": rpc error: code = NotFound desc = could not find container \"86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e\": container with ID starting with 86312ab64d6bf0b7c4ab9e3db4d2e74bfc2a3a4fc2c1e2ba4044fabf6f460b7e not found: ID does not exist" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.893241 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.901202 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.913820 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.914157 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="proxy-httpd" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914174 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="proxy-httpd" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.914197 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="ceilometer-notification-agent" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914203 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="ceilometer-notification-agent" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.914210 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="sg-core" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914216 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="sg-core" Mar 20 16:23:03 crc kubenswrapper[4813]: E0320 16:23:03.914222 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="ceilometer-central-agent" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914227 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="29861606-f4eb-4223-8285-f6da9902e378" 
containerName="ceilometer-central-agent" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914366 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="ceilometer-central-agent" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914384 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="ceilometer-notification-agent" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914397 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="proxy-httpd" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.914404 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="29861606-f4eb-4223-8285-f6da9902e378" containerName="sg-core" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.915791 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.918263 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.918423 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.918644 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:23:03 crc kubenswrapper[4813]: I0320 16:23:03.941093 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.044707 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.044774 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-config-data\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.044807 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.044875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-run-httpd\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.044928 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f754r\" (UniqueName: 
\"kubernetes.io/projected/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-kube-api-access-f754r\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.044976 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-scripts\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.045009 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.045033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-log-httpd\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146277 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-scripts\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146350 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146384 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-log-httpd\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146444 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146472 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-config-data\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146522 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 
16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146593 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-run-httpd\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.146646 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f754r\" (UniqueName: \"kubernetes.io/projected/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-kube-api-access-f754r\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.147043 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-log-httpd\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.147566 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-run-httpd\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.152340 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.153114 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-scripts\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.153351 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.154982 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.162237 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-config-data\") pod \"ceilometer-0\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.166786 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f754r\" (UniqueName: \"kubernetes.io/projected/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-kube-api-access-f754r\") pod \"ceilometer-0\" (UID: 
\"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.237665 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.569814 4813 generic.go:334] "Generic (PLEG): container finished" podID="f30f5314-3d20-4d91-9653-60096c44febf" containerID="4ce0f2637c7fb74b111fe0fc69ee6d91846b8fcd129a98dc40b76a1f83655f4c" exitCode=0 Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.569918 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" event={"ID":"f30f5314-3d20-4d91-9653-60096c44febf","Type":"ContainerDied","Data":"4ce0f2637c7fb74b111fe0fc69ee6d91846b8fcd129a98dc40b76a1f83655f4c"} Mar 20 16:23:04 crc kubenswrapper[4813]: W0320 16:23:04.669830 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46c3ae7f_aefc_405e_aa3a_6ef9191d9840.slice/crio-457302a3a99cfbc56d22677421979a3e3548b1687820c6970d37e960739cad5c WatchSource:0}: Error finding container 457302a3a99cfbc56d22677421979a3e3548b1687820c6970d37e960739cad5c: Status 404 returned error can't find the container with id 457302a3a99cfbc56d22677421979a3e3548b1687820c6970d37e960739cad5c Mar 20 16:23:04 crc kubenswrapper[4813]: I0320 16:23:04.672133 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:05 crc kubenswrapper[4813]: I0320 16:23:05.279136 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29861606-f4eb-4223-8285-f6da9902e378" path="/var/lib/kubelet/pods/29861606-f4eb-4223-8285-f6da9902e378/volumes" Mar 20 16:23:05 crc kubenswrapper[4813]: I0320 16:23:05.578608 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerStarted","Data":"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6"} Mar 20 16:23:05 crc kubenswrapper[4813]: I0320 16:23:05.578651 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerStarted","Data":"457302a3a99cfbc56d22677421979a3e3548b1687820c6970d37e960739cad5c"} Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.159635 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.286995 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts-volume\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-scripts-volume\") pod \"f30f5314-3d20-4d91-9653-60096c44febf\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.287075 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-config-data\") pod \"f30f5314-3d20-4d91-9653-60096c44febf\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.287104 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-combined-ca-bundle\") pod \"f30f5314-3d20-4d91-9653-60096c44febf\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.287177 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdj5t\" (UniqueName: \"kubernetes.io/projected/f30f5314-3d20-4d91-9653-60096c44febf-kube-api-access-pdj5t\") pod \"f30f5314-3d20-4d91-9653-60096c44febf\" (UID: \"f30f5314-3d20-4d91-9653-60096c44febf\") " Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.294541 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-scripts-volume" (OuterVolumeSpecName: "scripts-volume") pod "f30f5314-3d20-4d91-9653-60096c44febf" (UID: "f30f5314-3d20-4d91-9653-60096c44febf"). InnerVolumeSpecName "scripts-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.301038 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f30f5314-3d20-4d91-9653-60096c44febf-kube-api-access-pdj5t" (OuterVolumeSpecName: "kube-api-access-pdj5t") pod "f30f5314-3d20-4d91-9653-60096c44febf" (UID: "f30f5314-3d20-4d91-9653-60096c44febf"). InnerVolumeSpecName "kube-api-access-pdj5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.317739 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f30f5314-3d20-4d91-9653-60096c44febf" (UID: "f30f5314-3d20-4d91-9653-60096c44febf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.359409 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-config-data" (OuterVolumeSpecName: "config-data") pod "f30f5314-3d20-4d91-9653-60096c44febf" (UID: "f30f5314-3d20-4d91-9653-60096c44febf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.389167 4813 reconciler_common.go:293] "Volume detached for volume \"scripts-volume\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-scripts-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.389199 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.389212 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30f5314-3d20-4d91-9653-60096c44febf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.389222 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdj5t\" (UniqueName: \"kubernetes.io/projected/f30f5314-3d20-4d91-9653-60096c44febf-kube-api-access-pdj5t\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.588048 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerStarted","Data":"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4"} Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.590583 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" event={"ID":"f30f5314-3d20-4d91-9653-60096c44febf","Type":"ContainerDied","Data":"85aa5d2721e3303f048638e7d3cc7a35cce6da29e91d5aef97dd9de8a9730090"} Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.590614 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85aa5d2721e3303f048638e7d3cc7a35cce6da29e91d5aef97dd9de8a9730090" Mar 20 16:23:06 crc kubenswrapper[4813]: I0320 16:23:06.590663 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8" Mar 20 16:23:07 crc kubenswrapper[4813]: I0320 16:23:07.602883 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerStarted","Data":"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2"} Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.357408 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.366830 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-z5pzl"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.373788 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watchertest-account-delete-x56mb"] Mar 20 16:23:08 crc kubenswrapper[4813]: E0320 16:23:08.374115 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f30f5314-3d20-4d91-9653-60096c44febf" containerName="watcher-db-manage" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.374131 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f30f5314-3d20-4d91-9653-60096c44febf" containerName="watcher-db-manage" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.374268 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f30f5314-3d20-4d91-9653-60096c44febf" containerName="watcher-db-manage" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.374793 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.389661 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.397937 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watchertest-account-delete-x56mb"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.408495 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-purge-29567063-nr2p8"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.476540 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.476820 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" containerName="watcher-decision-engine" containerID="cri-o://ea94c04a30825396ef85c275c19b9e726c1c3df422495938aa8562d54748ccd2" gracePeriod=30 Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.498036 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.498288 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-kuttl-api-log" containerID="cri-o://7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd" gracePeriod=30 Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.498425 4813 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-api" containerID="cri-o://74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d" gracePeriod=30 Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.516224 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.516470 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-1" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-kuttl-api-log" containerID="cri-o://a3d12b190d92bca44bdbe28a99fc0f0cd8d57b22c7870597ac29802b012398c2" gracePeriod=30 Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.516846 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-1" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-api" containerID="cri-o://8ba2203bc71407c6cb111a72cea47314603cca28db8bb7676bd5186d09e1c375" gracePeriod=30 Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.523712 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.523943 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" containerName="watcher-applier" containerID="cri-o://1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" gracePeriod=30 Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.528299 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/541179f0-949f-488a-a9ae-06319b771d4e-operator-scripts\") pod \"watchertest-account-delete-x56mb\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.528396 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jncn\" (UniqueName: \"kubernetes.io/projected/541179f0-949f-488a-a9ae-06319b771d4e-kube-api-access-6jncn\") pod \"watchertest-account-delete-x56mb\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.632324 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/541179f0-949f-488a-a9ae-06319b771d4e-operator-scripts\") pod \"watchertest-account-delete-x56mb\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.632399 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jncn\" (UniqueName: \"kubernetes.io/projected/541179f0-949f-488a-a9ae-06319b771d4e-kube-api-access-6jncn\") pod \"watchertest-account-delete-x56mb\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.633341 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/541179f0-949f-488a-a9ae-06319b771d4e-operator-scripts\") pod \"watchertest-account-delete-x56mb\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.661157 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jncn\" (UniqueName: \"kubernetes.io/projected/541179f0-949f-488a-a9ae-06319b771d4e-kube-api-access-6jncn\") pod \"watchertest-account-delete-x56mb\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:08 crc kubenswrapper[4813]: I0320 16:23:08.703877 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.283251 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0ee1311-0844-4018-81dc-0b0fed02378c" path="/var/lib/kubelet/pods/b0ee1311-0844-4018-81dc-0b0fed02378c/volumes" Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.284067 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f30f5314-3d20-4d91-9653-60096c44febf" path="/var/lib/kubelet/pods/f30f5314-3d20-4d91-9653-60096c44febf/volumes" Mar 20 16:23:09 crc kubenswrapper[4813]: E0320 16:23:09.445667 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:23:09 crc kubenswrapper[4813]: E0320 16:23:09.447095 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:23:09 crc kubenswrapper[4813]: E0320 16:23:09.451769 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Mar 20 16:23:09 crc kubenswrapper[4813]: E0320 16:23:09.451939 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" containerName="watcher-applier" Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.522044 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watchertest-account-delete-x56mb"] Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.622149 4813 generic.go:334] "Generic (PLEG): container finished" podID="7b938e62-0adc-4638-92a4-9d598276519e" containerID="a3d12b190d92bca44bdbe28a99fc0f0cd8d57b22c7870597ac29802b012398c2" exitCode=143 Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.622229 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" 
event={"ID":"7b938e62-0adc-4638-92a4-9d598276519e","Type":"ContainerDied","Data":"a3d12b190d92bca44bdbe28a99fc0f0cd8d57b22c7870597ac29802b012398c2"} Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.624855 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerStarted","Data":"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1"} Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.626017 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.628165 4813 generic.go:334] "Generic (PLEG): container finished" podID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerID="7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd" exitCode=143 Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.628284 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"dff83d43-1e8c-4996-a4bd-93def70f4050","Type":"ContainerDied","Data":"7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd"} Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.629440 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" event={"ID":"541179f0-949f-488a-a9ae-06319b771d4e","Type":"ContainerStarted","Data":"6457a0f235ca61bd225fcf2118f91b9e0dc0e74e24320feeb0b0a933a36ca447"} Mar 20 16:23:09 crc kubenswrapper[4813]: I0320 16:23:09.652316 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.184874779 podStartE2EDuration="6.65229622s" podCreationTimestamp="2026-03-20 16:23:03 +0000 UTC" firstStartedPulling="2026-03-20 16:23:04.672530587 +0000 UTC m=+2714.095233428" lastFinishedPulling="2026-03-20 16:23:09.139952028 +0000 UTC m=+2718.562654869" observedRunningTime="2026-03-20 16:23:09.645388853 +0000 UTC m=+2719.068091694" watchObservedRunningTime="2026-03-20 16:23:09.65229622 +0000 UTC m=+2719.074999061" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.129601 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.227:9322/\": read tcp 10.217.0.2:48450->10.217.0.227:9322: read: connection reset by peer" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.129639 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.227:9322/\": read tcp 10.217.0.2:48440->10.217.0.227:9322: read: connection reset by peer" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.242144 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-1" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.228:9322/\": read tcp 10.217.0.2:54848->10.217.0.228:9322: read: connection reset by peer" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.242161 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-1" podUID="7b938e62-0adc-4638-92a4-9d598276519e" 
containerName="watcher-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.228:9322/\": read tcp 10.217.0.2:54844->10.217.0.228:9322: read: connection reset by peer" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.606263 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.647751 4813 generic.go:334] "Generic (PLEG): container finished" podID="7b938e62-0adc-4638-92a4-9d598276519e" containerID="8ba2203bc71407c6cb111a72cea47314603cca28db8bb7676bd5186d09e1c375" exitCode=0 Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.647811 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"7b938e62-0adc-4638-92a4-9d598276519e","Type":"ContainerDied","Data":"8ba2203bc71407c6cb111a72cea47314603cca28db8bb7676bd5186d09e1c375"} Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.649253 4813 generic.go:334] "Generic (PLEG): container finished" podID="541179f0-949f-488a-a9ae-06319b771d4e" containerID="f497e899a329e1a5c772467bdabd0280e11ec6ce83f73b3fd923e833da7fb878" exitCode=0 Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.649325 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" event={"ID":"541179f0-949f-488a-a9ae-06319b771d4e","Type":"ContainerDied","Data":"f497e899a329e1a5c772467bdabd0280e11ec6ce83f73b3fd923e833da7fb878"} Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.653539 4813 generic.go:334] "Generic (PLEG): container finished" podID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerID="74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d" exitCode=0 Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.653655 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.653701 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"dff83d43-1e8c-4996-a4bd-93def70f4050","Type":"ContainerDied","Data":"74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d"} Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.653733 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"dff83d43-1e8c-4996-a4bd-93def70f4050","Type":"ContainerDied","Data":"ab3f2ad51cd697d90968c2dec938cb310a4786d9ed275c4a5a1e779148732f52"} Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.653752 4813 scope.go:117] "RemoveContainer" containerID="74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.672644 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-combined-ca-bundle\") pod \"dff83d43-1e8c-4996-a4bd-93def70f4050\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.672713 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-cert-memcached-mtls\") pod \"dff83d43-1e8c-4996-a4bd-93def70f4050\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.672793 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-custom-prometheus-ca\") pod \"dff83d43-1e8c-4996-a4bd-93def70f4050\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.672843 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dff83d43-1e8c-4996-a4bd-93def70f4050-logs\") pod \"dff83d43-1e8c-4996-a4bd-93def70f4050\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.672941 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbkht\" (UniqueName: \"kubernetes.io/projected/dff83d43-1e8c-4996-a4bd-93def70f4050-kube-api-access-vbkht\") pod \"dff83d43-1e8c-4996-a4bd-93def70f4050\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.672973 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-config-data\") pod \"dff83d43-1e8c-4996-a4bd-93def70f4050\" (UID: \"dff83d43-1e8c-4996-a4bd-93def70f4050\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.674610 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dff83d43-1e8c-4996-a4bd-93def70f4050-logs" (OuterVolumeSpecName: "logs") pod "dff83d43-1e8c-4996-a4bd-93def70f4050" (UID: "dff83d43-1e8c-4996-a4bd-93def70f4050"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.675142 4813 scope.go:117] "RemoveContainer" containerID="7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.682035 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dff83d43-1e8c-4996-a4bd-93def70f4050-kube-api-access-vbkht" (OuterVolumeSpecName: "kube-api-access-vbkht") pod "dff83d43-1e8c-4996-a4bd-93def70f4050" (UID: "dff83d43-1e8c-4996-a4bd-93def70f4050"). InnerVolumeSpecName "kube-api-access-vbkht". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.717506 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dff83d43-1e8c-4996-a4bd-93def70f4050" (UID: "dff83d43-1e8c-4996-a4bd-93def70f4050"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.733840 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "dff83d43-1e8c-4996-a4bd-93def70f4050" (UID: "dff83d43-1e8c-4996-a4bd-93def70f4050"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.765316 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-config-data" (OuterVolumeSpecName: "config-data") pod "dff83d43-1e8c-4996-a4bd-93def70f4050" (UID: "dff83d43-1e8c-4996-a4bd-93def70f4050"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.775313 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbkht\" (UniqueName: \"kubernetes.io/projected/dff83d43-1e8c-4996-a4bd-93def70f4050-kube-api-access-vbkht\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.775338 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.775350 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.775359 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.775367 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dff83d43-1e8c-4996-a4bd-93def70f4050-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.782328 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "dff83d43-1e8c-4996-a4bd-93def70f4050" (UID: "dff83d43-1e8c-4996-a4bd-93def70f4050"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.833748 4813 scope.go:117] "RemoveContainer" containerID="74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d" Mar 20 16:23:10 crc kubenswrapper[4813]: E0320 16:23:10.835197 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d\": container with ID starting with 74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d not found: ID does not exist" containerID="74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.835245 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d"} err="failed to get container status \"74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d\": rpc error: code = NotFound desc = could not find container \"74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d\": container with ID starting with 74f7c69f60135445ac004aacef94ce4743baa1490deaac81373a39f9e416094d not found: ID does not exist" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.835272 4813 scope.go:117] "RemoveContainer" containerID="7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.835441 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:23:10 crc kubenswrapper[4813]: E0320 16:23:10.836769 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd\": container with ID starting with 7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd not found: ID does not exist" containerID="7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.836839 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd"} err="failed to get container status \"7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd\": rpc error: code = NotFound desc = could not find container \"7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd\": container with ID starting with 7c91e80d9b948e6f357c539997c737a190da4792524fdcaeaddf8130744566cd not found: ID does not exist" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.885785 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/dff83d43-1e8c-4996-a4bd-93def70f4050-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.990739 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b938e62-0adc-4638-92a4-9d598276519e-logs\") pod \"7b938e62-0adc-4638-92a4-9d598276519e\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.990824 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-cert-memcached-mtls\") pod \"7b938e62-0adc-4638-92a4-9d598276519e\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.990866 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-custom-prometheus-ca\") pod \"7b938e62-0adc-4638-92a4-9d598276519e\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.990933 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg6w2\" (UniqueName: \"kubernetes.io/projected/7b938e62-0adc-4638-92a4-9d598276519e-kube-api-access-bg6w2\") pod \"7b938e62-0adc-4638-92a4-9d598276519e\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.990974 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-config-data\") pod \"7b938e62-0adc-4638-92a4-9d598276519e\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " Mar 20 16:23:10 crc kubenswrapper[4813]: I0320 16:23:10.991082 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-combined-ca-bundle\") pod \"7b938e62-0adc-4638-92a4-9d598276519e\" (UID: \"7b938e62-0adc-4638-92a4-9d598276519e\") " Mar 20 16:23:10 
crc kubenswrapper[4813]: I0320 16:23:10.993641 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.000593 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.000966 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b938e62-0adc-4638-92a4-9d598276519e-logs" (OuterVolumeSpecName: "logs") pod "7b938e62-0adc-4638-92a4-9d598276519e" (UID: "7b938e62-0adc-4638-92a4-9d598276519e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.003403 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b938e62-0adc-4638-92a4-9d598276519e-kube-api-access-bg6w2" (OuterVolumeSpecName: "kube-api-access-bg6w2") pod "7b938e62-0adc-4638-92a4-9d598276519e" (UID: "7b938e62-0adc-4638-92a4-9d598276519e"). InnerVolumeSpecName "kube-api-access-bg6w2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.021315 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "7b938e62-0adc-4638-92a4-9d598276519e" (UID: "7b938e62-0adc-4638-92a4-9d598276519e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.021340 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b938e62-0adc-4638-92a4-9d598276519e" (UID: "7b938e62-0adc-4638-92a4-9d598276519e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.035806 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-config-data" (OuterVolumeSpecName: "config-data") pod "7b938e62-0adc-4638-92a4-9d598276519e" (UID: "7b938e62-0adc-4638-92a4-9d598276519e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.077432 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "7b938e62-0adc-4638-92a4-9d598276519e" (UID: "7b938e62-0adc-4638-92a4-9d598276519e"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.093361 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.093395 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b938e62-0adc-4638-92a4-9d598276519e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.093404 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.093414 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.093425 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg6w2\" (UniqueName: \"kubernetes.io/projected/7b938e62-0adc-4638-92a4-9d598276519e-kube-api-access-bg6w2\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.093434 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b938e62-0adc-4638-92a4-9d598276519e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:11 crc kubenswrapper[4813]: E0320 16:23:11.143537 4813 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.181:33856->38.102.83.181:37193: write tcp 38.102.83.181:33856->38.102.83.181:37193: write: broken pipe Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.277000 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" path="/var/lib/kubelet/pods/dff83d43-1e8c-4996-a4bd-93def70f4050/volumes" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.478577 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.666708 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"7b938e62-0adc-4638-92a4-9d598276519e","Type":"ContainerDied","Data":"8b3803a772be131dc3cb0dd3dde4ebea8491fa71f2a2c188ce823203d9c3bac8"} Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.666762 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.667014 4813 scope.go:117] "RemoveContainer" containerID="8ba2203bc71407c6cb111a72cea47314603cca28db8bb7676bd5186d09e1c375" Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.699465 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.710237 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:23:11 crc kubenswrapper[4813]: I0320 16:23:11.723514 4813 scope.go:117] "RemoveContainer" containerID="a3d12b190d92bca44bdbe28a99fc0f0cd8d57b22c7870597ac29802b012398c2" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.213522 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.266778 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:23:12 crc kubenswrapper[4813]: E0320 16:23:12.267005 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.292584 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.318090 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/541179f0-949f-488a-a9ae-06319b771d4e-operator-scripts\") pod \"541179f0-949f-488a-a9ae-06319b771d4e\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.318235 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jncn\" (UniqueName: \"kubernetes.io/projected/541179f0-949f-488a-a9ae-06319b771d4e-kube-api-access-6jncn\") pod \"541179f0-949f-488a-a9ae-06319b771d4e\" (UID: \"541179f0-949f-488a-a9ae-06319b771d4e\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.318982 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/541179f0-949f-488a-a9ae-06319b771d4e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "541179f0-949f-488a-a9ae-06319b771d4e" (UID: "541179f0-949f-488a-a9ae-06319b771d4e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.323629 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/541179f0-949f-488a-a9ae-06319b771d4e-kube-api-access-6jncn" (OuterVolumeSpecName: "kube-api-access-6jncn") pod "541179f0-949f-488a-a9ae-06319b771d4e" (UID: "541179f0-949f-488a-a9ae-06319b771d4e"). InnerVolumeSpecName "kube-api-access-6jncn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.419969 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pdz9\" (UniqueName: \"kubernetes.io/projected/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-kube-api-access-4pdz9\") pod \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.420034 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-combined-ca-bundle\") pod \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.420096 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-logs\") pod \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.420134 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-cert-memcached-mtls\") pod \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.420518 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-logs" (OuterVolumeSpecName: "logs") pod "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" (UID: "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.420715 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-config-data\") pod \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\" (UID: \"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4\") " Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.421474 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jncn\" (UniqueName: \"kubernetes.io/projected/541179f0-949f-488a-a9ae-06319b771d4e-kube-api-access-6jncn\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.421515 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.421528 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/541179f0-949f-488a-a9ae-06319b771d4e-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.437274 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-kube-api-access-4pdz9" (OuterVolumeSpecName: "kube-api-access-4pdz9") pod "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" (UID: "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4"). InnerVolumeSpecName "kube-api-access-4pdz9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.464458 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" (UID: "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.487578 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-config-data" (OuterVolumeSpecName: "config-data") pod "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" (UID: "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.514602 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" (UID: "5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.526468 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pdz9\" (UniqueName: \"kubernetes.io/projected/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-kube-api-access-4pdz9\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.526519 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.526531 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.526544 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.677999 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" event={"ID":"541179f0-949f-488a-a9ae-06319b771d4e","Type":"ContainerDied","Data":"6457a0f235ca61bd225fcf2118f91b9e0dc0e74e24320feeb0b0a933a36ca447"} Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.679039 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6457a0f235ca61bd225fcf2118f91b9e0dc0e74e24320feeb0b0a933a36ca447" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.678077 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watchertest-account-delete-x56mb" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.692637 4813 generic.go:334] "Generic (PLEG): container finished" podID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" exitCode=0 Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.692723 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.692790 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4","Type":"ContainerDied","Data":"1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b"} Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.692833 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4","Type":"ContainerDied","Data":"7f5f1e2537ad957f238d23996d6d874b33aee1b9d5111ad4d8ac65fa8f6081b7"} Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.692855 4813 scope.go:117] "RemoveContainer" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.692904 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-central-agent" containerID="cri-o://8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" gracePeriod=30 Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.693049 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="sg-core" containerID="cri-o://76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" gracePeriod=30 Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.693148 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="proxy-httpd" containerID="cri-o://9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" gracePeriod=30 Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.693170 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-notification-agent" containerID="cri-o://5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" gracePeriod=30 Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.756358 4813 scope.go:117] "RemoveContainer" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" Mar 20 16:23:12 crc kubenswrapper[4813]: E0320 16:23:12.756933 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b\": container with ID starting with 1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b not found: ID does not exist" containerID="1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.756992 4813 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b"} err="failed to get container status \"1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b\": rpc error: code = NotFound desc = could not find container \"1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b\": container with ID starting with 1bf575dd77383142b7b30d5419cfd07590cd33a23139c6c27e2772b6c92d882b not found: ID does not exist" Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.769674 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:23:12 crc kubenswrapper[4813]: I0320 16:23:12.797754 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.275669 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" path="/var/lib/kubelet/pods/5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4/volumes" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.276291 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b938e62-0adc-4638-92a4-9d598276519e" path="/var/lib/kubelet/pods/7b938e62-0adc-4638-92a4-9d598276519e/volumes" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.405369 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-jmjd8"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.411734 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-jmjd8"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.428960 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watchertest-account-delete-x56mb"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.436929 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-test-account-create-update-8xnqh"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.443546 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watchertest-account-delete-x56mb"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.449113 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-test-account-create-update-8xnqh"] Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.482039 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.646142 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-combined-ca-bundle\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.646601 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-sg-core-conf-yaml\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.646752 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f754r\" (UniqueName: \"kubernetes.io/projected/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-kube-api-access-f754r\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.646775 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-ceilometer-tls-certs\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.647209 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-scripts\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.647242 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-run-httpd\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.647344 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-config-data\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.647372 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-log-httpd\") pod \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\" (UID: \"46c3ae7f-aefc-405e-aa3a-6ef9191d9840\") " Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.648065 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.648254 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.651284 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-scripts" (OuterVolumeSpecName: "scripts") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.653341 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-kube-api-access-f754r" (OuterVolumeSpecName: "kube-api-access-f754r") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "kube-api-access-f754r". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.674862 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.688276 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705217 4813 generic.go:334] "Generic (PLEG): container finished" podID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" exitCode=0 Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705244 4813 generic.go:334] "Generic (PLEG): container finished" podID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" exitCode=2 Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705251 4813 generic.go:334] "Generic (PLEG): container finished" podID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" exitCode=0 Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705257 4813 generic.go:334] "Generic (PLEG): container finished" podID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" exitCode=0 Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705290 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerDied","Data":"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1"} Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705318 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerDied","Data":"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2"} Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705331 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerDied","Data":"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4"} Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705341 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerDied","Data":"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6"} Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705351 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"46c3ae7f-aefc-405e-aa3a-6ef9191d9840","Type":"ContainerDied","Data":"457302a3a99cfbc56d22677421979a3e3548b1687820c6970d37e960739cad5c"} Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705368 4813 scope.go:117] "RemoveContainer" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.705776 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.723774 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749099 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749132 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749147 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749158 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f754r\" (UniqueName: \"kubernetes.io/projected/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-kube-api-access-f754r\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749170 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749180 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.749191 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.761291 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-config-data" (OuterVolumeSpecName: "config-data") pod "46c3ae7f-aefc-405e-aa3a-6ef9191d9840" (UID: "46c3ae7f-aefc-405e-aa3a-6ef9191d9840"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.834201 4813 scope.go:117] "RemoveContainer" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.850280 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46c3ae7f-aefc-405e-aa3a-6ef9191d9840-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.854792 4813 scope.go:117] "RemoveContainer" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.875033 4813 scope.go:117] "RemoveContainer" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.894591 4813 scope.go:117] "RemoveContainer" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" Mar 20 16:23:13 crc kubenswrapper[4813]: E0320 16:23:13.895214 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": container with ID starting with 9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1 not found: ID does not exist" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.895267 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1"} err="failed to get container status \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": rpc error: code = NotFound desc = could not find container \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": container with ID starting with 9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.895299 4813 scope.go:117] "RemoveContainer" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" Mar 20 16:23:13 crc kubenswrapper[4813]: E0320 16:23:13.896355 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": container with ID starting with 76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2 not found: ID does not exist" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.896512 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2"} err="failed to get container status \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": rpc error: code = NotFound desc = could not find container \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": container with ID starting with 76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.896695 4813 scope.go:117] "RemoveContainer" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" Mar 20 16:23:13 crc kubenswrapper[4813]: E0320 
16:23:13.897236 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": container with ID starting with 5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4 not found: ID does not exist" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.897288 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4"} err="failed to get container status \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": rpc error: code = NotFound desc = could not find container \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": container with ID starting with 5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.897320 4813 scope.go:117] "RemoveContainer" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" Mar 20 16:23:13 crc kubenswrapper[4813]: E0320 16:23:13.897617 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": container with ID starting with 8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6 not found: ID does not exist" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.897646 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6"} err="failed to get container status \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": rpc error: code = NotFound desc = could not find container \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": container with ID starting with 8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.897664 4813 scope.go:117] "RemoveContainer" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.897897 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1"} err="failed to get container status \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": rpc error: code = NotFound desc = could not find container \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": container with ID starting with 9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.897930 4813 scope.go:117] "RemoveContainer" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.898356 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2"} err="failed to get container status \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": rpc error: code = 
NotFound desc = could not find container \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": container with ID starting with 76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.898454 4813 scope.go:117] "RemoveContainer" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.899025 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4"} err="failed to get container status \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": rpc error: code = NotFound desc = could not find container \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": container with ID starting with 5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.899067 4813 scope.go:117] "RemoveContainer" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.899445 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6"} err="failed to get container status \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": rpc error: code = NotFound desc = could not find container \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": container with ID starting with 8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.899475 4813 scope.go:117] "RemoveContainer" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.900007 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1"} err="failed to get container status \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": rpc error: code = NotFound desc = could not find container \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": container with ID starting with 9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.900104 4813 scope.go:117] "RemoveContainer" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.900535 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2"} err="failed to get container status \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": rpc error: code = NotFound desc = could not find container \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": container with ID starting with 76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.900563 4813 scope.go:117] "RemoveContainer" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 
16:23:13.900939 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4"} err="failed to get container status \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": rpc error: code = NotFound desc = could not find container \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": container with ID starting with 5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.900972 4813 scope.go:117] "RemoveContainer" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.901399 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6"} err="failed to get container status \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": rpc error: code = NotFound desc = could not find container \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": container with ID starting with 8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.901511 4813 scope.go:117] "RemoveContainer" containerID="9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.901902 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1"} err="failed to get container status \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": rpc error: code = NotFound desc = could not find container \"9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1\": container with ID starting with 9aa7a170665f738ffb175544ff62352df718b37b734268543a5f564998306cc1 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.902016 4813 scope.go:117] "RemoveContainer" containerID="76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.902322 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2"} err="failed to get container status \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": rpc error: code = NotFound desc = could not find container \"76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2\": container with ID starting with 76584e4fbae8f2f01db3103f6fa94b4989447133aad4ff1c97847989f5f7f3f2 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.902343 4813 scope.go:117] "RemoveContainer" containerID="5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.902684 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4"} err="failed to get container status \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": rpc error: code = NotFound desc = could not find container \"5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4\": container with ID starting with 
5f6e1d12caec4dcfcaeb7fe116ab0191dbd521d95091a10f0e0e41c62b0edbc4 not found: ID does not exist" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.902706 4813 scope.go:117] "RemoveContainer" containerID="8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6" Mar 20 16:23:13 crc kubenswrapper[4813]: I0320 16:23:13.902909 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6"} err="failed to get container status \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": rpc error: code = NotFound desc = could not find container \"8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6\": container with ID starting with 8d05eac53525c67050326048784e11f5519f66d14007bf3e23109a2f8a4836c6 not found: ID does not exist" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.038791 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.055888 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069025 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069360 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-kuttl-api-log" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069382 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-kuttl-api-log" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069401 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="proxy-httpd" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069408 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="proxy-httpd" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069415 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-kuttl-api-log" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069424 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-kuttl-api-log" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069435 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="sg-core" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069442 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="sg-core" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069454 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-central-agent" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069463 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-central-agent" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069495 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-api" Mar 20 
16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069504 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-api" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069517 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-notification-agent" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069524 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-notification-agent" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069539 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" containerName="watcher-applier" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069547 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" containerName="watcher-applier" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069561 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-api" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069569 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-api" Mar 20 16:23:14 crc kubenswrapper[4813]: E0320 16:23:14.069580 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="541179f0-949f-488a-a9ae-06319b771d4e" containerName="mariadb-account-delete" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069587 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="541179f0-949f-488a-a9ae-06319b771d4e" containerName="mariadb-account-delete" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069754 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-central-agent" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069772 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="ceilometer-notification-agent" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069786 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="sg-core" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069797 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="541179f0-949f-488a-a9ae-06319b771d4e" containerName="mariadb-account-delete" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069807 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-api" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069817 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-kuttl-api-log" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069830 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b0c59ea-667d-46ad-93c5-8cc1a8b2b4c4" containerName="watcher-applier" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069842 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b938e62-0adc-4638-92a4-9d598276519e" containerName="watcher-api" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069854 4813 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" containerName="proxy-httpd" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.069864 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="dff83d43-1e8c-4996-a4bd-93def70f4050" containerName="watcher-kuttl-api-log" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.071371 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.073639 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.073825 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.074155 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.079809 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.256761 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-run-httpd\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.256824 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.256852 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2zpl\" (UniqueName: \"kubernetes.io/projected/0dc16b36-d581-4e7c-9059-31fec0fc4810-kube-api-access-f2zpl\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.256906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.256935 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-scripts\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.256964 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc 
kubenswrapper[4813]: I0320 16:23:14.256984 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-log-httpd\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.257007 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-config-data\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.358919 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.358983 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-scripts\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.359025 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.359053 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-log-httpd\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.359072 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-config-data\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.359206 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-run-httpd\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.359236 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.359254 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2zpl\" (UniqueName: 
\"kubernetes.io/projected/0dc16b36-d581-4e7c-9059-31fec0fc4810-kube-api-access-f2zpl\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.361712 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-log-httpd\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.361897 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-run-httpd\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.364207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.364214 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.364389 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.364915 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-config-data\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.367177 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-scripts\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.392176 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2zpl\" (UniqueName: \"kubernetes.io/projected/0dc16b36-d581-4e7c-9059-31fec0fc4810-kube-api-access-f2zpl\") pod \"ceilometer-0\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.427397 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:14 crc kubenswrapper[4813]: I0320 16:23:14.929254 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:15 crc kubenswrapper[4813]: I0320 16:23:15.274969 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46c3ae7f-aefc-405e-aa3a-6ef9191d9840" path="/var/lib/kubelet/pods/46c3ae7f-aefc-405e-aa3a-6ef9191d9840/volumes" Mar 20 16:23:15 crc kubenswrapper[4813]: I0320 16:23:15.275783 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="541179f0-949f-488a-a9ae-06319b771d4e" path="/var/lib/kubelet/pods/541179f0-949f-488a-a9ae-06319b771d4e/volumes" Mar 20 16:23:15 crc kubenswrapper[4813]: I0320 16:23:15.276217 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="651ff3dc-d168-41b8-8bd9-fd7018a46024" path="/var/lib/kubelet/pods/651ff3dc-d168-41b8-8bd9-fd7018a46024/volumes" Mar 20 16:23:15 crc kubenswrapper[4813]: I0320 16:23:15.276734 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5cecfcd-3911-40f8-924a-cd14f75c81ff" path="/var/lib/kubelet/pods/b5cecfcd-3911-40f8-924a-cd14f75c81ff/volumes" Mar 20 16:23:15 crc kubenswrapper[4813]: I0320 16:23:15.733573 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerStarted","Data":"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865"} Mar 20 16:23:15 crc kubenswrapper[4813]: I0320 16:23:15.733879 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerStarted","Data":"42d021e250ebfe4aeb7695bfa803f52a81afbe89b44fc345b1bde7febd035661"} Mar 20 16:23:16 crc kubenswrapper[4813]: I0320 16:23:16.745697 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerStarted","Data":"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253"} Mar 20 16:23:17 crc kubenswrapper[4813]: I0320 16:23:17.771357 4813 generic.go:334] "Generic (PLEG): container finished" podID="1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" containerID="ea94c04a30825396ef85c275c19b9e726c1c3df422495938aa8562d54748ccd2" exitCode=0 Mar 20 16:23:17 crc kubenswrapper[4813]: I0320 16:23:17.772236 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd","Type":"ContainerDied","Data":"ea94c04a30825396ef85c275c19b9e726c1c3df422495938aa8562d54748ccd2"} Mar 20 16:23:17 crc kubenswrapper[4813]: I0320 16:23:17.777367 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerStarted","Data":"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077"} Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.071659 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229075 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-logs\") pod \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229138 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdwmv\" (UniqueName: \"kubernetes.io/projected/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-kube-api-access-kdwmv\") pod \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229175 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-combined-ca-bundle\") pod \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229191 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-config-data\") pod \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229225 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-custom-prometheus-ca\") pod \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229253 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-cert-memcached-mtls\") pod \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\" (UID: \"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd\") " Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.229689 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-logs" (OuterVolumeSpecName: "logs") pod "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" (UID: "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.234995 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-kube-api-access-kdwmv" (OuterVolumeSpecName: "kube-api-access-kdwmv") pod "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" (UID: "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd"). InnerVolumeSpecName "kube-api-access-kdwmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.256908 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" (UID: "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.261333 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" (UID: "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.294953 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-config-data" (OuterVolumeSpecName: "config-data") pod "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" (UID: "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.317617 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" (UID: "1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.331404 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.331436 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.331445 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdwmv\" (UniqueName: \"kubernetes.io/projected/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-kube-api-access-kdwmv\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.331455 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.331466 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.331499 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.786733 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd","Type":"ContainerDied","Data":"091a2bebcf10afcdd9e8f7327454ee7b704a093c32c074aa2efb009fffcf3cb2"} Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.786774 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.787114 4813 scope.go:117] "RemoveContainer" containerID="ea94c04a30825396ef85c275c19b9e726c1c3df422495938aa8562d54748ccd2" Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.820128 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:23:18 crc kubenswrapper[4813]: I0320 16:23:18.825616 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:23:19 crc kubenswrapper[4813]: I0320 16:23:19.278851 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" path="/var/lib/kubelet/pods/1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd/volumes" Mar 20 16:23:19 crc kubenswrapper[4813]: I0320 16:23:19.798726 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerStarted","Data":"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240"} Mar 20 16:23:19 crc kubenswrapper[4813]: I0320 16:23:19.799124 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:19 crc kubenswrapper[4813]: I0320 16:23:19.821530 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.433608995 podStartE2EDuration="5.821512452s" podCreationTimestamp="2026-03-20 16:23:14 +0000 UTC" firstStartedPulling="2026-03-20 16:23:14.932822997 +0000 UTC m=+2724.355525838" lastFinishedPulling="2026-03-20 16:23:19.320726434 +0000 UTC m=+2728.743429295" observedRunningTime="2026-03-20 16:23:19.816873497 +0000 UTC m=+2729.239576358" watchObservedRunningTime="2026-03-20 16:23:19.821512452 +0000 UTC m=+2729.244215293" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.521537 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-qjrdt"] Mar 20 16:23:21 crc kubenswrapper[4813]: E0320 16:23:21.522148 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" containerName="watcher-decision-engine" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.522159 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" containerName="watcher-decision-engine" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.522305 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ec4b12c-b8ba-42dc-aaf4-7785c8b51ddd" containerName="watcher-decision-engine" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.522825 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.574526 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qjrdt"] Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.591373 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpbzp\" (UniqueName: \"kubernetes.io/projected/fc732f79-010e-47e2-87ab-f6a1c40c7288-kube-api-access-bpbzp\") pod \"watcher-db-create-qjrdt\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.591456 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc732f79-010e-47e2-87ab-f6a1c40c7288-operator-scripts\") pod \"watcher-db-create-qjrdt\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.597542 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k"] Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.598574 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.604760 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.608626 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k"] Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.692708 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-operator-scripts\") pod \"watcher-5a5a-account-create-update-2kb4k\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.692750 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj8nv\" (UniqueName: \"kubernetes.io/projected/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-kube-api-access-kj8nv\") pod \"watcher-5a5a-account-create-update-2kb4k\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.692778 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpbzp\" (UniqueName: \"kubernetes.io/projected/fc732f79-010e-47e2-87ab-f6a1c40c7288-kube-api-access-bpbzp\") pod \"watcher-db-create-qjrdt\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.692825 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc732f79-010e-47e2-87ab-f6a1c40c7288-operator-scripts\") pod \"watcher-db-create-qjrdt\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 
16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.693518 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc732f79-010e-47e2-87ab-f6a1c40c7288-operator-scripts\") pod \"watcher-db-create-qjrdt\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.727025 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpbzp\" (UniqueName: \"kubernetes.io/projected/fc732f79-010e-47e2-87ab-f6a1c40c7288-kube-api-access-bpbzp\") pod \"watcher-db-create-qjrdt\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.794114 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-operator-scripts\") pod \"watcher-5a5a-account-create-update-2kb4k\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.794166 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj8nv\" (UniqueName: \"kubernetes.io/projected/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-kube-api-access-kj8nv\") pod \"watcher-5a5a-account-create-update-2kb4k\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.794764 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-operator-scripts\") pod \"watcher-5a5a-account-create-update-2kb4k\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.812661 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj8nv\" (UniqueName: \"kubernetes.io/projected/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-kube-api-access-kj8nv\") pod \"watcher-5a5a-account-create-update-2kb4k\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.883455 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:21 crc kubenswrapper[4813]: I0320 16:23:21.978787 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.335322 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qjrdt"] Mar 20 16:23:22 crc kubenswrapper[4813]: W0320 16:23:22.336883 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc732f79_010e_47e2_87ab_f6a1c40c7288.slice/crio-1052a03fa8e7911e7b2915786846b4754e2ff1938dc1d517318dd2949b0b2b60 WatchSource:0}: Error finding container 1052a03fa8e7911e7b2915786846b4754e2ff1938dc1d517318dd2949b0b2b60: Status 404 returned error can't find the container with id 1052a03fa8e7911e7b2915786846b4754e2ff1938dc1d517318dd2949b0b2b60 Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.465697 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k"] Mar 20 16:23:22 crc kubenswrapper[4813]: W0320 16:23:22.466213 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded1a0f2e_c2b6_47bd_a85d_07044d9563a2.slice/crio-fde56f2cb200a499bc6307bb8ed6bba240bb26473d452b5cba19e16177336615 WatchSource:0}: Error finding container fde56f2cb200a499bc6307bb8ed6bba240bb26473d452b5cba19e16177336615: Status 404 returned error can't find the container with id fde56f2cb200a499bc6307bb8ed6bba240bb26473d452b5cba19e16177336615 Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.823739 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" event={"ID":"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2","Type":"ContainerStarted","Data":"173500e822aba777df32f7a3c87bf912fdaa4febed3dd8b89dcd22d9353d9cc8"} Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.823783 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" event={"ID":"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2","Type":"ContainerStarted","Data":"fde56f2cb200a499bc6307bb8ed6bba240bb26473d452b5cba19e16177336615"} Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.825255 4813 generic.go:334] "Generic (PLEG): container finished" podID="fc732f79-010e-47e2-87ab-f6a1c40c7288" containerID="bd16f13546fd49ec42684779e2a3637de7a6745e5bf1e050fcfd7c6b47b163c9" exitCode=0 Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.825292 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qjrdt" event={"ID":"fc732f79-010e-47e2-87ab-f6a1c40c7288","Type":"ContainerDied","Data":"bd16f13546fd49ec42684779e2a3637de7a6745e5bf1e050fcfd7c6b47b163c9"} Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.825315 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qjrdt" event={"ID":"fc732f79-010e-47e2-87ab-f6a1c40c7288","Type":"ContainerStarted","Data":"1052a03fa8e7911e7b2915786846b4754e2ff1938dc1d517318dd2949b0b2b60"} Mar 20 16:23:22 crc kubenswrapper[4813]: I0320 16:23:22.840448 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" podStartSLOduration=1.840428641 podStartE2EDuration="1.840428641s" podCreationTimestamp="2026-03-20 16:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 16:23:22.838247682 +0000 UTC m=+2732.260950523" watchObservedRunningTime="2026-03-20 16:23:22.840428641 +0000 UTC m=+2732.263131492" Mar 20 16:23:23 crc kubenswrapper[4813]: I0320 16:23:23.266272 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:23:23 crc kubenswrapper[4813]: E0320 16:23:23.266557 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:23:23 crc kubenswrapper[4813]: I0320 16:23:23.834234 4813 generic.go:334] "Generic (PLEG): container finished" podID="ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" containerID="173500e822aba777df32f7a3c87bf912fdaa4febed3dd8b89dcd22d9353d9cc8" exitCode=0 Mar 20 16:23:23 crc kubenswrapper[4813]: I0320 16:23:23.834425 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" event={"ID":"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2","Type":"ContainerDied","Data":"173500e822aba777df32f7a3c87bf912fdaa4febed3dd8b89dcd22d9353d9cc8"} Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.231515 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.332686 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc732f79-010e-47e2-87ab-f6a1c40c7288-operator-scripts\") pod \"fc732f79-010e-47e2-87ab-f6a1c40c7288\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.332794 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpbzp\" (UniqueName: \"kubernetes.io/projected/fc732f79-010e-47e2-87ab-f6a1c40c7288-kube-api-access-bpbzp\") pod \"fc732f79-010e-47e2-87ab-f6a1c40c7288\" (UID: \"fc732f79-010e-47e2-87ab-f6a1c40c7288\") " Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.334236 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc732f79-010e-47e2-87ab-f6a1c40c7288-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fc732f79-010e-47e2-87ab-f6a1c40c7288" (UID: "fc732f79-010e-47e2-87ab-f6a1c40c7288"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.338678 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc732f79-010e-47e2-87ab-f6a1c40c7288-kube-api-access-bpbzp" (OuterVolumeSpecName: "kube-api-access-bpbzp") pod "fc732f79-010e-47e2-87ab-f6a1c40c7288" (UID: "fc732f79-010e-47e2-87ab-f6a1c40c7288"). InnerVolumeSpecName "kube-api-access-bpbzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.434227 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc732f79-010e-47e2-87ab-f6a1c40c7288-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.434272 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpbzp\" (UniqueName: \"kubernetes.io/projected/fc732f79-010e-47e2-87ab-f6a1c40c7288-kube-api-access-bpbzp\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.844896 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-qjrdt" Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.844924 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-qjrdt" event={"ID":"fc732f79-010e-47e2-87ab-f6a1c40c7288","Type":"ContainerDied","Data":"1052a03fa8e7911e7b2915786846b4754e2ff1938dc1d517318dd2949b0b2b60"} Mar 20 16:23:24 crc kubenswrapper[4813]: I0320 16:23:24.845153 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1052a03fa8e7911e7b2915786846b4754e2ff1938dc1d517318dd2949b0b2b60" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.184389 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.350706 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj8nv\" (UniqueName: \"kubernetes.io/projected/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-kube-api-access-kj8nv\") pod \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.350797 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-operator-scripts\") pod \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\" (UID: \"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2\") " Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.351802 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" (UID: "ed1a0f2e-c2b6-47bd-a85d-07044d9563a2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.354854 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-kube-api-access-kj8nv" (OuterVolumeSpecName: "kube-api-access-kj8nv") pod "ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" (UID: "ed1a0f2e-c2b6-47bd-a85d-07044d9563a2"). InnerVolumeSpecName "kube-api-access-kj8nv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.452407 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj8nv\" (UniqueName: \"kubernetes.io/projected/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-kube-api-access-kj8nv\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.452441 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.856174 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" event={"ID":"ed1a0f2e-c2b6-47bd-a85d-07044d9563a2","Type":"ContainerDied","Data":"fde56f2cb200a499bc6307bb8ed6bba240bb26473d452b5cba19e16177336615"} Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.856225 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k" Mar 20 16:23:25 crc kubenswrapper[4813]: I0320 16:23:25.856228 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fde56f2cb200a499bc6307bb8ed6bba240bb26473d452b5cba19e16177336615" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.957410 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-98vxl"] Mar 20 16:23:26 crc kubenswrapper[4813]: E0320 16:23:26.958058 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc732f79-010e-47e2-87ab-f6a1c40c7288" containerName="mariadb-database-create" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.958073 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc732f79-010e-47e2-87ab-f6a1c40c7288" containerName="mariadb-database-create" Mar 20 16:23:26 crc kubenswrapper[4813]: E0320 16:23:26.958090 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" containerName="mariadb-account-create-update" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.958097 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" containerName="mariadb-account-create-update" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.958240 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc732f79-010e-47e2-87ab-f6a1c40c7288" containerName="mariadb-database-create" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.958258 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" containerName="mariadb-account-create-update" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.958813 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.961264 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-ws5vd" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.963010 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:23:26 crc kubenswrapper[4813]: I0320 16:23:26.970317 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-98vxl"] Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.077020 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-config-data\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.077080 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2wgn\" (UniqueName: \"kubernetes.io/projected/236617cb-ca1c-4f76-bd23-e81457053874-kube-api-access-t2wgn\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.077107 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-db-sync-config-data\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.077171 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.178651 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-config-data\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.178701 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2wgn\" (UniqueName: \"kubernetes.io/projected/236617cb-ca1c-4f76-bd23-e81457053874-kube-api-access-t2wgn\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.178720 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-db-sync-config-data\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc 
kubenswrapper[4813]: I0320 16:23:27.178769 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.183859 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.185392 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-db-sync-config-data\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.189221 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-config-data\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.197268 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2wgn\" (UniqueName: \"kubernetes.io/projected/236617cb-ca1c-4f76-bd23-e81457053874-kube-api-access-t2wgn\") pod \"watcher-kuttl-db-sync-98vxl\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.278520 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.769242 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-98vxl"] Mar 20 16:23:27 crc kubenswrapper[4813]: I0320 16:23:27.884469 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" event={"ID":"236617cb-ca1c-4f76-bd23-e81457053874","Type":"ContainerStarted","Data":"8293743f09bb0de719d07321bbd249ae55da3f7c840368d6d744e4cb2c9d587c"} Mar 20 16:23:28 crc kubenswrapper[4813]: I0320 16:23:28.893601 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" event={"ID":"236617cb-ca1c-4f76-bd23-e81457053874","Type":"ContainerStarted","Data":"59b3434a9e2f0d241decbd570fa4d9d692da6602c00fbb2ea2b93faeb7d50162"} Mar 20 16:23:28 crc kubenswrapper[4813]: I0320 16:23:28.907954 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" podStartSLOduration=2.907939667 podStartE2EDuration="2.907939667s" podCreationTimestamp="2026-03-20 16:23:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:23:28.905403079 +0000 UTC m=+2738.328105910" watchObservedRunningTime="2026-03-20 16:23:28.907939667 +0000 UTC m=+2738.330642508" Mar 20 16:23:30 crc kubenswrapper[4813]: I0320 16:23:30.911271 4813 generic.go:334] "Generic (PLEG): container finished" podID="236617cb-ca1c-4f76-bd23-e81457053874" containerID="59b3434a9e2f0d241decbd570fa4d9d692da6602c00fbb2ea2b93faeb7d50162" exitCode=0 Mar 20 16:23:30 crc kubenswrapper[4813]: I0320 16:23:30.911329 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" event={"ID":"236617cb-ca1c-4f76-bd23-e81457053874","Type":"ContainerDied","Data":"59b3434a9e2f0d241decbd570fa4d9d692da6602c00fbb2ea2b93faeb7d50162"} Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.342904 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.472896 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-combined-ca-bundle\") pod \"236617cb-ca1c-4f76-bd23-e81457053874\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.473049 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2wgn\" (UniqueName: \"kubernetes.io/projected/236617cb-ca1c-4f76-bd23-e81457053874-kube-api-access-t2wgn\") pod \"236617cb-ca1c-4f76-bd23-e81457053874\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.473087 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-config-data\") pod \"236617cb-ca1c-4f76-bd23-e81457053874\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.473173 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-db-sync-config-data\") pod \"236617cb-ca1c-4f76-bd23-e81457053874\" (UID: \"236617cb-ca1c-4f76-bd23-e81457053874\") " Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.477997 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "236617cb-ca1c-4f76-bd23-e81457053874" (UID: "236617cb-ca1c-4f76-bd23-e81457053874"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.478002 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/236617cb-ca1c-4f76-bd23-e81457053874-kube-api-access-t2wgn" (OuterVolumeSpecName: "kube-api-access-t2wgn") pod "236617cb-ca1c-4f76-bd23-e81457053874" (UID: "236617cb-ca1c-4f76-bd23-e81457053874"). InnerVolumeSpecName "kube-api-access-t2wgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.497900 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "236617cb-ca1c-4f76-bd23-e81457053874" (UID: "236617cb-ca1c-4f76-bd23-e81457053874"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.513970 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-config-data" (OuterVolumeSpecName: "config-data") pod "236617cb-ca1c-4f76-bd23-e81457053874" (UID: "236617cb-ca1c-4f76-bd23-e81457053874"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.575527 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.575576 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2wgn\" (UniqueName: \"kubernetes.io/projected/236617cb-ca1c-4f76-bd23-e81457053874-kube-api-access-t2wgn\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.575591 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.575603 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/236617cb-ca1c-4f76-bd23-e81457053874-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.929572 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.929463 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-98vxl" event={"ID":"236617cb-ca1c-4f76-bd23-e81457053874","Type":"ContainerDied","Data":"8293743f09bb0de719d07321bbd249ae55da3f7c840368d6d744e4cb2c9d587c"} Mar 20 16:23:32 crc kubenswrapper[4813]: I0320 16:23:32.931006 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8293743f09bb0de719d07321bbd249ae55da3f7c840368d6d744e4cb2c9d587c" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.207144 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:23:33 crc kubenswrapper[4813]: E0320 16:23:33.207573 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236617cb-ca1c-4f76-bd23-e81457053874" containerName="watcher-kuttl-db-sync" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.207597 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="236617cb-ca1c-4f76-bd23-e81457053874" containerName="watcher-kuttl-db-sync" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.207796 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="236617cb-ca1c-4f76-bd23-e81457053874" containerName="watcher-kuttl-db-sync" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.208921 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.211611 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.212223 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-ws5vd" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.218346 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.277144 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.278186 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.280366 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.287227 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.287364 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c4dx\" (UniqueName: \"kubernetes.io/projected/2a0386d0-150b-4801-a553-9ed04dda83da-kube-api-access-7c4dx\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.287449 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.287555 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.287604 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0386d0-150b-4801-a553-9ed04dda83da-logs\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.287667 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: 
\"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.291620 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.307704 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.308735 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.315566 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.330973 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.388723 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqs8n\" (UniqueName: \"kubernetes.io/projected/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-kube-api-access-jqs8n\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.388880 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.388990 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0386d0-150b-4801-a553-9ed04dda83da-logs\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389092 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389368 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389440 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4987895-e3f1-4081-bee3-e2dbacf04dd1-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389462 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389500 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389556 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0386d0-150b-4801-a553-9ed04dda83da-logs\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389569 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389640 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvkkw\" (UniqueName: \"kubernetes.io/projected/e4987895-e3f1-4081-bee3-e2dbacf04dd1-kube-api-access-wvkkw\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389844 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c4dx\" (UniqueName: \"kubernetes.io/projected/2a0386d0-150b-4801-a553-9ed04dda83da-kube-api-access-7c4dx\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 
16:23:33.389880 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389920 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.389975 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.393746 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.393795 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.393906 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.397267 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.406225 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c4dx\" (UniqueName: \"kubernetes.io/projected/2a0386d0-150b-4801-a553-9ed04dda83da-kube-api-access-7c4dx\") pod \"watcher-kuttl-api-0\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491531 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491581 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491616 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491645 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491683 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4987895-e3f1-4081-bee3-e2dbacf04dd1-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491737 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491785 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvkkw\" (UniqueName: \"kubernetes.io/projected/e4987895-e3f1-4081-bee3-e2dbacf04dd1-kube-api-access-wvkkw\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491830 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.491873 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: 
I0320 16:23:33.491902 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqs8n\" (UniqueName: \"kubernetes.io/projected/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-kube-api-access-jqs8n\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.493068 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.493151 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4987895-e3f1-4081-bee3-e2dbacf04dd1-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.496277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.496411 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.496844 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.496883 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.498232 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.498545 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.503249 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.509866 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvkkw\" (UniqueName: \"kubernetes.io/projected/e4987895-e3f1-4081-bee3-e2dbacf04dd1-kube-api-access-wvkkw\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.521960 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqs8n\" (UniqueName: \"kubernetes.io/projected/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-kube-api-access-jqs8n\") pod \"watcher-kuttl-applier-0\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.528283 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.601510 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:33 crc kubenswrapper[4813]: I0320 16:23:33.635761 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:34 crc kubenswrapper[4813]: W0320 16:23:34.076092 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a0386d0_150b_4801_a553_9ed04dda83da.slice/crio-d589945abcfdce001b2cc5745951a32ff0a93a4f298a4e0d3c150142796b49d1 WatchSource:0}: Error finding container d589945abcfdce001b2cc5745951a32ff0a93a4f298a4e0d3c150142796b49d1: Status 404 returned error can't find the container with id d589945abcfdce001b2cc5745951a32ff0a93a4f298a4e0d3c150142796b49d1 Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.083988 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.187943 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:23:34 crc kubenswrapper[4813]: W0320 16:23:34.194898 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4987895_e3f1_4081_bee3_e2dbacf04dd1.slice/crio-d012d1b2c19150907fce6c6e373ceab670a6063ecbfbd0799d39cf91fa2c65be WatchSource:0}: Error finding container d012d1b2c19150907fce6c6e373ceab670a6063ecbfbd0799d39cf91fa2c65be: Status 404 returned error can't find the container with id d012d1b2c19150907fce6c6e373ceab670a6063ecbfbd0799d39cf91fa2c65be Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.196588 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.944780 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" 
event={"ID":"2a0386d0-150b-4801-a553-9ed04dda83da","Type":"ContainerStarted","Data":"69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576"} Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.945158 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"2a0386d0-150b-4801-a553-9ed04dda83da","Type":"ContainerStarted","Data":"c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150"} Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.945177 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"2a0386d0-150b-4801-a553-9ed04dda83da","Type":"ContainerStarted","Data":"d589945abcfdce001b2cc5745951a32ff0a93a4f298a4e0d3c150142796b49d1"} Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.945193 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.947683 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"e4987895-e3f1-4081-bee3-e2dbacf04dd1","Type":"ContainerStarted","Data":"7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19"} Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.947731 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"e4987895-e3f1-4081-bee3-e2dbacf04dd1","Type":"ContainerStarted","Data":"d012d1b2c19150907fce6c6e373ceab670a6063ecbfbd0799d39cf91fa2c65be"} Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.949831 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9","Type":"ContainerStarted","Data":"a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72"} Mar 20 16:23:34 crc kubenswrapper[4813]: I0320 16:23:34.949865 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9","Type":"ContainerStarted","Data":"60f8ce5f8ebfeee5c9f1818864e44cce62ddbda1c179a27f41ea180a4f657600"} Mar 20 16:23:35 crc kubenswrapper[4813]: I0320 16:23:35.034548 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.034531346 podStartE2EDuration="2.034531346s" podCreationTimestamp="2026-03-20 16:23:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:23:35.029045028 +0000 UTC m=+2744.451747889" watchObservedRunningTime="2026-03-20 16:23:35.034531346 +0000 UTC m=+2744.457234187" Mar 20 16:23:35 crc kubenswrapper[4813]: I0320 16:23:35.077275 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=2.077258379 podStartE2EDuration="2.077258379s" podCreationTimestamp="2026-03-20 16:23:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:23:35.071243387 +0000 UTC m=+2744.493946228" watchObservedRunningTime="2026-03-20 16:23:35.077258379 +0000 UTC m=+2744.499961220" Mar 20 16:23:35 crc kubenswrapper[4813]: I0320 16:23:35.100297 4813 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.10028007 podStartE2EDuration="2.10028007s" podCreationTimestamp="2026-03-20 16:23:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:23:35.096993041 +0000 UTC m=+2744.519695882" watchObservedRunningTime="2026-03-20 16:23:35.10028007 +0000 UTC m=+2744.522982911" Mar 20 16:23:35 crc kubenswrapper[4813]: I0320 16:23:35.115754 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:36 crc kubenswrapper[4813]: I0320 16:23:36.265408 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:23:36 crc kubenswrapper[4813]: E0320 16:23:36.266860 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:23:36 crc kubenswrapper[4813]: I0320 16:23:36.288198 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:37 crc kubenswrapper[4813]: I0320 16:23:37.069076 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:37 crc kubenswrapper[4813]: I0320 16:23:37.498821 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:38 crc kubenswrapper[4813]: I0320 16:23:38.529636 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:38 crc kubenswrapper[4813]: I0320 16:23:38.636641 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:38 crc kubenswrapper[4813]: I0320 16:23:38.723315 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:39 crc kubenswrapper[4813]: I0320 16:23:39.967799 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:41 crc kubenswrapper[4813]: I0320 16:23:41.141777 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:42 crc kubenswrapper[4813]: I0320 16:23:42.384635 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.529910 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.537240 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.601669 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.628995 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.633264 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.636779 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:43 crc kubenswrapper[4813]: I0320 16:23:43.666687 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:44 crc kubenswrapper[4813]: I0320 16:23:44.078361 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:44 crc kubenswrapper[4813]: I0320 16:23:44.082597 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:23:44 crc kubenswrapper[4813]: I0320 16:23:44.105731 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:23:44 crc kubenswrapper[4813]: I0320 16:23:44.109376 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:23:44 crc kubenswrapper[4813]: I0320 16:23:44.479065 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:44 crc kubenswrapper[4813]: I0320 16:23:44.807739 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.047441 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.277342 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-db-create-pmsn7"] Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.278301 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.290115 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-db-create-pmsn7"] Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.382500 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-ff97-account-create-update-6wll2"] Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.383523 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.385054 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-db-secret" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.397733 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63b748b2-8139-45e5-b2b5-42b9eacaff48-operator-scripts\") pod \"cinder-db-create-pmsn7\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.398136 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfzrs\" (UniqueName: \"kubernetes.io/projected/63b748b2-8139-45e5-b2b5-42b9eacaff48-kube-api-access-xfzrs\") pod \"cinder-db-create-pmsn7\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.404097 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-ff97-account-create-update-6wll2"] Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.500082 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9tdl\" (UniqueName: \"kubernetes.io/projected/4bb55726-1843-49ef-904e-dfb5d7ea3d00-kube-api-access-w9tdl\") pod \"cinder-ff97-account-create-update-6wll2\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.500156 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63b748b2-8139-45e5-b2b5-42b9eacaff48-operator-scripts\") pod \"cinder-db-create-pmsn7\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.500183 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bb55726-1843-49ef-904e-dfb5d7ea3d00-operator-scripts\") pod \"cinder-ff97-account-create-update-6wll2\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.500265 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfzrs\" (UniqueName: \"kubernetes.io/projected/63b748b2-8139-45e5-b2b5-42b9eacaff48-kube-api-access-xfzrs\") pod \"cinder-db-create-pmsn7\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.501217 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63b748b2-8139-45e5-b2b5-42b9eacaff48-operator-scripts\") pod \"cinder-db-create-pmsn7\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.523741 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfzrs\" (UniqueName: 
\"kubernetes.io/projected/63b748b2-8139-45e5-b2b5-42b9eacaff48-kube-api-access-xfzrs\") pod \"cinder-db-create-pmsn7\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.601881 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9tdl\" (UniqueName: \"kubernetes.io/projected/4bb55726-1843-49ef-904e-dfb5d7ea3d00-kube-api-access-w9tdl\") pod \"cinder-ff97-account-create-update-6wll2\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.602233 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bb55726-1843-49ef-904e-dfb5d7ea3d00-operator-scripts\") pod \"cinder-ff97-account-create-update-6wll2\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.602844 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bb55726-1843-49ef-904e-dfb5d7ea3d00-operator-scripts\") pod \"cinder-ff97-account-create-update-6wll2\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.624448 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9tdl\" (UniqueName: \"kubernetes.io/projected/4bb55726-1843-49ef-904e-dfb5d7ea3d00-kube-api-access-w9tdl\") pod \"cinder-ff97-account-create-update-6wll2\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.632353 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:45 crc kubenswrapper[4813]: I0320 16:23:45.698804 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.093631 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-db-create-pmsn7"] Mar 20 16:23:46 crc kubenswrapper[4813]: W0320 16:23:46.098459 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63b748b2_8139_45e5_b2b5_42b9eacaff48.slice/crio-359bdbde6691b8b76bb706ef1f6a4a64ac9f30d5fb95c086e9e3ac8ff9a476f7 WatchSource:0}: Error finding container 359bdbde6691b8b76bb706ef1f6a4a64ac9f30d5fb95c086e9e3ac8ff9a476f7: Status 404 returned error can't find the container with id 359bdbde6691b8b76bb706ef1f6a4a64ac9f30d5fb95c086e9e3ac8ff9a476f7 Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.229441 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:46 crc kubenswrapper[4813]: W0320 16:23:46.232417 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb55726_1843_49ef_904e_dfb5d7ea3d00.slice/crio-ed4ed50c7bed034301c7c81a6c0bdbeec4caaf4eb96de856ca719974cfc31e62 WatchSource:0}: Error finding container ed4ed50c7bed034301c7c81a6c0bdbeec4caaf4eb96de856ca719974cfc31e62: Status 404 returned error can't find the container with id ed4ed50c7bed034301c7c81a6c0bdbeec4caaf4eb96de856ca719974cfc31e62 Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.235152 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-ff97-account-create-update-6wll2"] Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.951168 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.951621 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-central-agent" containerID="cri-o://3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" gracePeriod=30 Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.951676 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="proxy-httpd" containerID="cri-o://52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" gracePeriod=30 Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.951695 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-notification-agent" containerID="cri-o://da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" gracePeriod=30 Mar 20 16:23:46 crc kubenswrapper[4813]: I0320 16:23:46.951707 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="sg-core" containerID="cri-o://ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" gracePeriod=30 Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.104580 4813 generic.go:334] "Generic (PLEG): container finished" podID="0dc16b36-d581-4e7c-9059-31fec0fc4810" 
containerID="ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" exitCode=2 Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.104661 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerDied","Data":"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077"} Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.106379 4813 generic.go:334] "Generic (PLEG): container finished" podID="4bb55726-1843-49ef-904e-dfb5d7ea3d00" containerID="007f35ee459cf81da479c6c75adcb3296ba4907325104e4f50aae28d4d8fb198" exitCode=0 Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.106406 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" event={"ID":"4bb55726-1843-49ef-904e-dfb5d7ea3d00","Type":"ContainerDied","Data":"007f35ee459cf81da479c6c75adcb3296ba4907325104e4f50aae28d4d8fb198"} Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.106428 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" event={"ID":"4bb55726-1843-49ef-904e-dfb5d7ea3d00","Type":"ContainerStarted","Data":"ed4ed50c7bed034301c7c81a6c0bdbeec4caaf4eb96de856ca719974cfc31e62"} Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.107811 4813 generic.go:334] "Generic (PLEG): container finished" podID="63b748b2-8139-45e5-b2b5-42b9eacaff48" containerID="b61af43d9c34e9ac555ff5288ea02486e89f3716b7f991e871ce5fa3ea5ef108" exitCode=0 Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.107835 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-create-pmsn7" event={"ID":"63b748b2-8139-45e5-b2b5-42b9eacaff48","Type":"ContainerDied","Data":"b61af43d9c34e9ac555ff5288ea02486e89f3716b7f991e871ce5fa3ea5ef108"} Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.107849 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-create-pmsn7" event={"ID":"63b748b2-8139-45e5-b2b5-42b9eacaff48","Type":"ContainerStarted","Data":"359bdbde6691b8b76bb706ef1f6a4a64ac9f30d5fb95c086e9e3ac8ff9a476f7"} Mar 20 16:23:47 crc kubenswrapper[4813]: I0320 16:23:47.415164 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.104203 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117247 4813 generic.go:334] "Generic (PLEG): container finished" podID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerID="52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" exitCode=0 Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117274 4813 generic.go:334] "Generic (PLEG): container finished" podID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerID="da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" exitCode=0 Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117284 4813 generic.go:334] "Generic (PLEG): container finished" podID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerID="3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" exitCode=0 Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117440 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117850 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerDied","Data":"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240"} Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117881 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerDied","Data":"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253"} Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117893 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerDied","Data":"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865"} Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117903 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"0dc16b36-d581-4e7c-9059-31fec0fc4810","Type":"ContainerDied","Data":"42d021e250ebfe4aeb7695bfa803f52a81afbe89b44fc345b1bde7febd035661"} Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.117917 4813 scope.go:117] "RemoveContainer" containerID="52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137042 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-run-httpd\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137093 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2zpl\" (UniqueName: \"kubernetes.io/projected/0dc16b36-d581-4e7c-9059-31fec0fc4810-kube-api-access-f2zpl\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137132 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-combined-ca-bundle\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137249 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-ceilometer-tls-certs\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137297 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-scripts\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137360 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-config-data\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: 
\"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137402 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-sg-core-conf-yaml\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.137426 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-log-httpd\") pod \"0dc16b36-d581-4e7c-9059-31fec0fc4810\" (UID: \"0dc16b36-d581-4e7c-9059-31fec0fc4810\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.138603 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.138929 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.144340 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-scripts" (OuterVolumeSpecName: "scripts") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.165700 4813 scope.go:117] "RemoveContainer" containerID="ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.165729 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc16b36-d581-4e7c-9059-31fec0fc4810-kube-api-access-f2zpl" (OuterVolumeSpecName: "kube-api-access-f2zpl") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "kube-api-access-f2zpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.231138 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.237221 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.237643 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239160 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2zpl\" (UniqueName: \"kubernetes.io/projected/0dc16b36-d581-4e7c-9059-31fec0fc4810-kube-api-access-f2zpl\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239195 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239204 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239214 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239222 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239230 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.239238 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0dc16b36-d581-4e7c-9059-31fec0fc4810-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.250935 4813 scope.go:117] "RemoveContainer" containerID="da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.255631 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-config-data" (OuterVolumeSpecName: "config-data") pod "0dc16b36-d581-4e7c-9059-31fec0fc4810" (UID: "0dc16b36-d581-4e7c-9059-31fec0fc4810"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.265789 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.266083 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.287618 4813 scope.go:117] "RemoveContainer" containerID="3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.309572 4813 scope.go:117] "RemoveContainer" containerID="52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.310339 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": container with ID starting with 52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240 not found: ID does not exist" containerID="52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.310396 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240"} err="failed to get container status \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": rpc error: code = NotFound desc = could not find container \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": container with ID starting with 52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.310435 4813 scope.go:117] "RemoveContainer" containerID="ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.310843 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": container with ID starting with ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077 not found: ID does not exist" containerID="ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.310878 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077"} err="failed to get container status \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": rpc error: code = NotFound desc = could not find container \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": container with ID starting with ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.310904 4813 scope.go:117] "RemoveContainer" 
containerID="da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.311147 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": container with ID starting with da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253 not found: ID does not exist" containerID="da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.311173 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253"} err="failed to get container status \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": rpc error: code = NotFound desc = could not find container \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": container with ID starting with da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.311187 4813 scope.go:117] "RemoveContainer" containerID="3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.311416 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": container with ID starting with 3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865 not found: ID does not exist" containerID="3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.311435 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865"} err="failed to get container status \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": rpc error: code = NotFound desc = could not find container \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": container with ID starting with 3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.311450 4813 scope.go:117] "RemoveContainer" containerID="52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.311760 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240"} err="failed to get container status \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": rpc error: code = NotFound desc = could not find container \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": container with ID starting with 52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.311785 4813 scope.go:117] "RemoveContainer" containerID="ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312131 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077"} err="failed to get container status \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": rpc error: code = NotFound desc = could not find container \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": container with ID starting with ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312150 4813 scope.go:117] "RemoveContainer" containerID="da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312521 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253"} err="failed to get container status \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": rpc error: code = NotFound desc = could not find container \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": container with ID starting with da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312541 4813 scope.go:117] "RemoveContainer" containerID="3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312726 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865"} err="failed to get container status \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": rpc error: code = NotFound desc = could not find container \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": container with ID starting with 3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312744 4813 scope.go:117] "RemoveContainer" containerID="52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312949 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240"} err="failed to get container status \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": rpc error: code = NotFound desc = could not find container \"52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240\": container with ID starting with 52342cbf421da4d9daa31157bfedaac85f579e8cd6e3396deed30923c2316240 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.312968 4813 scope.go:117] "RemoveContainer" containerID="ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.313135 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077"} err="failed to get container status \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": rpc error: code = NotFound desc = could not find container \"ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077\": container with ID starting with ba9e47d862bf34a026336b7cca363cfa5268d325f1c3815b3c66c21d40bd2077 not found: ID does not exist" Mar 
20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.313152 4813 scope.go:117] "RemoveContainer" containerID="da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.313438 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253"} err="failed to get container status \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": rpc error: code = NotFound desc = could not find container \"da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253\": container with ID starting with da21891d2ed4fe2125d63eff101f5172a2f052f664a48d28483facfcabce5253 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.313454 4813 scope.go:117] "RemoveContainer" containerID="3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.315633 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865"} err="failed to get container status \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": rpc error: code = NotFound desc = could not find container \"3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865\": container with ID starting with 3d9a2ccd8b68ec88c6fc615477ddaf6cfb69b3afb15582d4a2f8c7620c303865 not found: ID does not exist" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.340437 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc16b36-d581-4e7c-9059-31fec0fc4810-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.460631 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.460681 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.480596 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.481372 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="proxy-httpd" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.481475 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="proxy-httpd" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.481600 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="sg-core" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.481688 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="sg-core" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.481941 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-notification-agent" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.482038 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-notification-agent" Mar 20 16:23:48 crc kubenswrapper[4813]: E0320 16:23:48.482168 4813 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-central-agent" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.482275 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-central-agent" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.482568 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="proxy-httpd" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.482655 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-central-agent" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.482711 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="ceilometer-notification-agent" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.482781 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" containerName="sg-core" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.484368 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.488093 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.488192 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.491246 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.494266 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.522551 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.556214 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bb55726-1843-49ef-904e-dfb5d7ea3d00-operator-scripts\") pod \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.556528 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9tdl\" (UniqueName: \"kubernetes.io/projected/4bb55726-1843-49ef-904e-dfb5d7ea3d00-kube-api-access-w9tdl\") pod \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\" (UID: \"4bb55726-1843-49ef-904e-dfb5d7ea3d00\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.556835 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb55726-1843-49ef-904e-dfb5d7ea3d00-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4bb55726-1843-49ef-904e-dfb5d7ea3d00" (UID: "4bb55726-1843-49ef-904e-dfb5d7ea3d00"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.557184 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.557371 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-config-data\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.557505 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m64x6\" (UniqueName: \"kubernetes.io/projected/a7d010c8-b2f7-4e64-afe3-0abd085364a4-kube-api-access-m64x6\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.557649 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.557841 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-scripts\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.557969 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.558104 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.558320 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.558520 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bb55726-1843-49ef-904e-dfb5d7ea3d00-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.558807 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.560731 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb55726-1843-49ef-904e-dfb5d7ea3d00-kube-api-access-w9tdl" (OuterVolumeSpecName: "kube-api-access-w9tdl") pod "4bb55726-1843-49ef-904e-dfb5d7ea3d00" (UID: "4bb55726-1843-49ef-904e-dfb5d7ea3d00"). InnerVolumeSpecName "kube-api-access-w9tdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.600193 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.659695 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63b748b2-8139-45e5-b2b5-42b9eacaff48-operator-scripts\") pod \"63b748b2-8139-45e5-b2b5-42b9eacaff48\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfzrs\" (UniqueName: \"kubernetes.io/projected/63b748b2-8139-45e5-b2b5-42b9eacaff48-kube-api-access-xfzrs\") pod \"63b748b2-8139-45e5-b2b5-42b9eacaff48\" (UID: \"63b748b2-8139-45e5-b2b5-42b9eacaff48\") " Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660342 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660372 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660411 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-config-data\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660426 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m64x6\" (UniqueName: \"kubernetes.io/projected/a7d010c8-b2f7-4e64-afe3-0abd085364a4-kube-api-access-m64x6\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660442 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660486 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-scripts\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660513 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660540 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660578 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9tdl\" (UniqueName: \"kubernetes.io/projected/4bb55726-1843-49ef-904e-dfb5d7ea3d00-kube-api-access-w9tdl\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.660702 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63b748b2-8139-45e5-b2b5-42b9eacaff48-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "63b748b2-8139-45e5-b2b5-42b9eacaff48" (UID: "63b748b2-8139-45e5-b2b5-42b9eacaff48"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.661640 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-run-httpd\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.662346 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-log-httpd\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.664444 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63b748b2-8139-45e5-b2b5-42b9eacaff48-kube-api-access-xfzrs" (OuterVolumeSpecName: "kube-api-access-xfzrs") pod "63b748b2-8139-45e5-b2b5-42b9eacaff48" (UID: "63b748b2-8139-45e5-b2b5-42b9eacaff48"). InnerVolumeSpecName "kube-api-access-xfzrs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.665943 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-config-data\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.666485 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.667305 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-scripts\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.670986 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.671357 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.678138 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m64x6\" (UniqueName: \"kubernetes.io/projected/a7d010c8-b2f7-4e64-afe3-0abd085364a4-kube-api-access-m64x6\") pod \"ceilometer-0\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.762274 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/63b748b2-8139-45e5-b2b5-42b9eacaff48-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.762315 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfzrs\" (UniqueName: \"kubernetes.io/projected/63b748b2-8139-45e5-b2b5-42b9eacaff48-kube-api-access-xfzrs\") on node \"crc\" DevicePath \"\"" Mar 20 16:23:48 crc kubenswrapper[4813]: I0320 16:23:48.811913 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.130129 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" event={"ID":"4bb55726-1843-49ef-904e-dfb5d7ea3d00","Type":"ContainerDied","Data":"ed4ed50c7bed034301c7c81a6c0bdbeec4caaf4eb96de856ca719974cfc31e62"} Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.130180 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed4ed50c7bed034301c7c81a6c0bdbeec4caaf4eb96de856ca719974cfc31e62" Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.130257 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-ff97-account-create-update-6wll2" Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.134904 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-create-pmsn7" event={"ID":"63b748b2-8139-45e5-b2b5-42b9eacaff48","Type":"ContainerDied","Data":"359bdbde6691b8b76bb706ef1f6a4a64ac9f30d5fb95c086e9e3ac8ff9a476f7"} Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.134951 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="359bdbde6691b8b76bb706ef1f6a4a64ac9f30d5fb95c086e9e3ac8ff9a476f7" Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.135012 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-db-create-pmsn7" Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.305781 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc16b36-d581-4e7c-9059-31fec0fc4810" path="/var/lib/kubelet/pods/0dc16b36-d581-4e7c-9059-31fec0fc4810/volumes" Mar 20 16:23:49 crc kubenswrapper[4813]: E0320 16:23:49.364934 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63b748b2_8139_45e5_b2b5_42b9eacaff48.slice\": RecentStats: unable to find data in memory cache]" Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.666272 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:23:49 crc kubenswrapper[4813]: W0320 16:23:49.669932 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7d010c8_b2f7_4e64_afe3_0abd085364a4.slice/crio-98cc9a8acfbe4b453d65740e04fd91ef565a0ca661432a341d4a0b2ab772a869 WatchSource:0}: Error finding container 98cc9a8acfbe4b453d65740e04fd91ef565a0ca661432a341d4a0b2ab772a869: Status 404 returned error can't find the container with id 98cc9a8acfbe4b453d65740e04fd91ef565a0ca661432a341d4a0b2ab772a869 Mar 20 16:23:49 crc kubenswrapper[4813]: I0320 16:23:49.808933 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.153936 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerStarted","Data":"98cc9a8acfbe4b453d65740e04fd91ef565a0ca661432a341d4a0b2ab772a869"} Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.681703 4813 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["watcher-kuttl-default/cinder-db-sync-h2pgr"] Mar 20 16:23:50 crc kubenswrapper[4813]: E0320 16:23:50.682017 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b748b2-8139-45e5-b2b5-42b9eacaff48" containerName="mariadb-database-create" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.682036 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b748b2-8139-45e5-b2b5-42b9eacaff48" containerName="mariadb-database-create" Mar 20 16:23:50 crc kubenswrapper[4813]: E0320 16:23:50.682071 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb55726-1843-49ef-904e-dfb5d7ea3d00" containerName="mariadb-account-create-update" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.682079 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb55726-1843-49ef-904e-dfb5d7ea3d00" containerName="mariadb-account-create-update" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.682246 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="63b748b2-8139-45e5-b2b5-42b9eacaff48" containerName="mariadb-database-create" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.682268 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bb55726-1843-49ef-904e-dfb5d7ea3d00" containerName="mariadb-account-create-update" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.682895 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.687823 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-config-data" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.687829 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-scripts" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.687829 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-cinder-dockercfg-b8jff" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.694027 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-db-sync-h2pgr"] Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.800887 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-config-data\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.800979 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-combined-ca-bundle\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.801227 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-scripts\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.801296 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f13fe6e6-3a88-4d63-9726-d851cad85ecf-etc-machine-id\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.801501 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgzlj\" (UniqueName: \"kubernetes.io/projected/f13fe6e6-3a88-4d63-9726-d851cad85ecf-kube-api-access-hgzlj\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.801563 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-db-sync-config-data\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903035 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgzlj\" (UniqueName: \"kubernetes.io/projected/f13fe6e6-3a88-4d63-9726-d851cad85ecf-kube-api-access-hgzlj\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903077 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-db-sync-config-data\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903129 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-config-data\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903156 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-combined-ca-bundle\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903220 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-scripts\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903240 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f13fe6e6-3a88-4d63-9726-d851cad85ecf-etc-machine-id\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.903299 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/f13fe6e6-3a88-4d63-9726-d851cad85ecf-etc-machine-id\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.907541 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-combined-ca-bundle\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.909435 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-config-data\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.910712 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-scripts\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.917972 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-db-sync-config-data\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.929707 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgzlj\" (UniqueName: \"kubernetes.io/projected/f13fe6e6-3a88-4d63-9726-d851cad85ecf-kube-api-access-hgzlj\") pod \"cinder-db-sync-h2pgr\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.972807 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:50 crc kubenswrapper[4813]: I0320 16:23:50.996587 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:23:51 crc kubenswrapper[4813]: I0320 16:23:51.173998 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerStarted","Data":"3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a"} Mar 20 16:23:51 crc kubenswrapper[4813]: I0320 16:23:51.174303 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerStarted","Data":"e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8"} Mar 20 16:23:51 crc kubenswrapper[4813]: I0320 16:23:51.446543 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-db-sync-h2pgr"] Mar 20 16:23:52 crc kubenswrapper[4813]: I0320 16:23:52.195434 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" event={"ID":"f13fe6e6-3a88-4d63-9726-d851cad85ecf","Type":"ContainerStarted","Data":"8e13243791feb3456d383e1306963d68d25e0f05474e77dd21605a2869a2c719"} Mar 20 16:23:52 crc kubenswrapper[4813]: I0320 16:23:52.207421 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:52 crc kubenswrapper[4813]: I0320 16:23:52.208151 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerStarted","Data":"7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953"} Mar 20 16:23:53 crc kubenswrapper[4813]: I0320 16:23:53.447118 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:54 crc kubenswrapper[4813]: I0320 16:23:54.244394 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerStarted","Data":"ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd"} Mar 20 16:23:54 crc kubenswrapper[4813]: I0320 16:23:54.247309 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:23:54 crc kubenswrapper[4813]: I0320 16:23:54.276287 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.547502775 podStartE2EDuration="6.276267231s" podCreationTimestamp="2026-03-20 16:23:48 +0000 UTC" firstStartedPulling="2026-03-20 16:23:49.673862897 +0000 UTC m=+2759.096565738" lastFinishedPulling="2026-03-20 16:23:53.402627353 +0000 UTC m=+2762.825330194" observedRunningTime="2026-03-20 16:23:54.264975706 +0000 UTC m=+2763.687678547" watchObservedRunningTime="2026-03-20 16:23:54.276267231 +0000 UTC m=+2763.698970072" Mar 20 16:23:54 crc kubenswrapper[4813]: I0320 16:23:54.631425 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:55 crc kubenswrapper[4813]: I0320 16:23:55.814108 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:57 crc kubenswrapper[4813]: I0320 16:23:57.031368 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:58 crc kubenswrapper[4813]: I0320 16:23:58.266495 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:23:59 crc kubenswrapper[4813]: I0320 16:23:59.266221 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:23:59 crc kubenswrapper[4813]: E0320 16:23:59.266604 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:23:59 crc kubenswrapper[4813]: I0320 16:23:59.454044 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.152464 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567064-nhjmb"] Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.154567 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.157959 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.162467 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.162643 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.186593 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567064-nhjmb"] Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.286002 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44xbj\" (UniqueName: \"kubernetes.io/projected/330a3487-131e-4ff0-884b-cf8df2e6dc66-kube-api-access-44xbj\") pod \"auto-csr-approver-29567064-nhjmb\" (UID: \"330a3487-131e-4ff0-884b-cf8df2e6dc66\") " pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.389297 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44xbj\" (UniqueName: \"kubernetes.io/projected/330a3487-131e-4ff0-884b-cf8df2e6dc66-kube-api-access-44xbj\") pod \"auto-csr-approver-29567064-nhjmb\" (UID: \"330a3487-131e-4ff0-884b-cf8df2e6dc66\") " pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.411995 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44xbj\" (UniqueName: \"kubernetes.io/projected/330a3487-131e-4ff0-884b-cf8df2e6dc66-kube-api-access-44xbj\") pod \"auto-csr-approver-29567064-nhjmb\" (UID: \"330a3487-131e-4ff0-884b-cf8df2e6dc66\") " pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.481515 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:00 crc kubenswrapper[4813]: I0320 16:24:00.642739 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:01 crc kubenswrapper[4813]: I0320 16:24:01.839651 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:03 crc kubenswrapper[4813]: I0320 16:24:03.033800 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:04 crc kubenswrapper[4813]: I0320 16:24:04.233052 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:05 crc kubenswrapper[4813]: I0320 16:24:05.399345 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:06 crc kubenswrapper[4813]: I0320 16:24:06.597068 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:07 crc kubenswrapper[4813]: I0320 16:24:07.800341 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:07 crc kubenswrapper[4813]: E0320 16:24:07.991815 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Mar 20 16:24:07 crc kubenswrapper[4813]: E0320 16:24:07.992071 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hgzlj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-h2pgr_watcher-kuttl-default(f13fe6e6-3a88-4d63-9726-d851cad85ecf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 16:24:07 crc kubenswrapper[4813]: E0320 16:24:07.993574 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" podUID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" Mar 20 16:24:08 crc kubenswrapper[4813]: I0320 16:24:08.385978 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567064-nhjmb"] Mar 20 16:24:08 crc kubenswrapper[4813]: E0320 16:24:08.391591 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" podUID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" Mar 20 16:24:09 crc kubenswrapper[4813]: I0320 16:24:09.070145 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:09 crc kubenswrapper[4813]: I0320 16:24:09.397870 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" event={"ID":"330a3487-131e-4ff0-884b-cf8df2e6dc66","Type":"ContainerStarted","Data":"81450e3d6ad1ce9f08bcd7b8a1e1ad241eac67a56c5d81c15ba14e490a2ad61b"} Mar 20 16:24:10 crc kubenswrapper[4813]: I0320 16:24:10.260743 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:10 crc kubenswrapper[4813]: I0320 16:24:10.411262 4813 generic.go:334] "Generic (PLEG): container finished" podID="330a3487-131e-4ff0-884b-cf8df2e6dc66" containerID="9c0c5ff090824f8ae74ef669507e524af6e8716bb1f25827cd1ec60ee89070ff" exitCode=0 Mar 20 16:24:10 crc kubenswrapper[4813]: I0320 16:24:10.411302 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" event={"ID":"330a3487-131e-4ff0-884b-cf8df2e6dc66","Type":"ContainerDied","Data":"9c0c5ff090824f8ae74ef669507e524af6e8716bb1f25827cd1ec60ee89070ff"} Mar 20 16:24:11 crc kubenswrapper[4813]: I0320 16:24:11.553256 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:11 crc kubenswrapper[4813]: I0320 16:24:11.816150 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:11 crc kubenswrapper[4813]: I0320 16:24:11.988346 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44xbj\" (UniqueName: \"kubernetes.io/projected/330a3487-131e-4ff0-884b-cf8df2e6dc66-kube-api-access-44xbj\") pod \"330a3487-131e-4ff0-884b-cf8df2e6dc66\" (UID: \"330a3487-131e-4ff0-884b-cf8df2e6dc66\") " Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.004776 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/330a3487-131e-4ff0-884b-cf8df2e6dc66-kube-api-access-44xbj" (OuterVolumeSpecName: "kube-api-access-44xbj") pod "330a3487-131e-4ff0-884b-cf8df2e6dc66" (UID: "330a3487-131e-4ff0-884b-cf8df2e6dc66"). InnerVolumeSpecName "kube-api-access-44xbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.090417 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44xbj\" (UniqueName: \"kubernetes.io/projected/330a3487-131e-4ff0-884b-cf8df2e6dc66-kube-api-access-44xbj\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.266347 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:24:12 crc kubenswrapper[4813]: E0320 16:24:12.266627 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.428242 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" event={"ID":"330a3487-131e-4ff0-884b-cf8df2e6dc66","Type":"ContainerDied","Data":"81450e3d6ad1ce9f08bcd7b8a1e1ad241eac67a56c5d81c15ba14e490a2ad61b"} Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.428284 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81450e3d6ad1ce9f08bcd7b8a1e1ad241eac67a56c5d81c15ba14e490a2ad61b" Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.428305 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567064-nhjmb" Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.728294 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.880073 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567058-nsmxd"] Mar 20 16:24:12 crc kubenswrapper[4813]: I0320 16:24:12.886266 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567058-nsmxd"] Mar 20 16:24:13 crc kubenswrapper[4813]: I0320 16:24:13.276794 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b36cb93-16d8-40e3-bff6-45a70579002e" path="/var/lib/kubelet/pods/8b36cb93-16d8-40e3-bff6-45a70579002e/volumes" Mar 20 16:24:13 crc kubenswrapper[4813]: I0320 16:24:13.919127 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:15 crc kubenswrapper[4813]: I0320 16:24:15.208398 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:16 crc kubenswrapper[4813]: I0320 16:24:16.451045 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:17 crc kubenswrapper[4813]: I0320 16:24:17.660513 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.697091 4813 scope.go:117] "RemoveContainer" containerID="b6684b5755c588d5eb87b616c9d881a382ef66d3a3f30a13dd2affca42917ffa" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.749061 4813 scope.go:117] "RemoveContainer" containerID="180af0d8f1c13811b7eacd608c555bdd520458a09c96dab5e89aea845c2544c4" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.792771 4813 scope.go:117] "RemoveContainer" containerID="a327420812e82c312a90374645a54160d24b34b0040c2e72a092d1f44fadc725" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.825017 4813 scope.go:117] "RemoveContainer" containerID="f6696a828ef2dc7ee0b5ea8697e1eed6d4d52c01afa0ad0ec2b23b1c7dc77382" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.830512 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.893576 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.948714 4813 scope.go:117] "RemoveContainer" containerID="778a3f275b2bfba5a450191f7949472cee90c5864640a023ec9066846956ccc9" Mar 20 16:24:18 crc kubenswrapper[4813]: I0320 16:24:18.974670 4813 scope.go:117] "RemoveContainer" containerID="41ecc9418f1c1817ac8bda9b30b0fe272103b0019c9a0f98c99f3c5c05a384e2" Mar 20 16:24:19 crc kubenswrapper[4813]: I0320 16:24:19.033428 4813 scope.go:117] "RemoveContainer" containerID="cc83545d0b4e417d30fcc50653d472fe3467a72515f2ec6f869451ef735e247e" Mar 20 16:24:19 crc kubenswrapper[4813]: I0320 16:24:19.073299 4813 scope.go:117] "RemoveContainer" containerID="0636ab555da1451d7cf1a4bc9bd1dad4be06b0d652f040639143e6d838022e33" Mar 20 16:24:19 crc kubenswrapper[4813]: I0320 16:24:19.109607 4813 scope.go:117] "RemoveContainer" containerID="b2a85925b98f2125ff57ed0715a506cd6ebf040b930251a7229e326ab6c5c65f" Mar 20 16:24:20 crc kubenswrapper[4813]: I0320 16:24:20.131390 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:20 crc kubenswrapper[4813]: I0320 16:24:20.495543 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" event={"ID":"f13fe6e6-3a88-4d63-9726-d851cad85ecf","Type":"ContainerStarted","Data":"7446e286e9891874d6503968b437f1847f285b7e0be45a3938f1a95f83a9d9eb"} Mar 20 16:24:20 crc kubenswrapper[4813]: I0320 16:24:20.520322 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" podStartSLOduration=2.261958613 podStartE2EDuration="30.520305137s" podCreationTimestamp="2026-03-20 16:23:50 +0000 UTC" firstStartedPulling="2026-03-20 16:23:51.452406035 +0000 UTC m=+2760.875108876" lastFinishedPulling="2026-03-20 16:24:19.710752559 +0000 UTC m=+2789.133455400" observedRunningTime="2026-03-20 16:24:20.515116127 +0000 UTC m=+2789.937818968" watchObservedRunningTime="2026-03-20 16:24:20.520305137 +0000 UTC m=+2789.943007978" Mar 20 16:24:21 crc kubenswrapper[4813]: I0320 16:24:21.374062 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:22 crc kubenswrapper[4813]: I0320 16:24:22.606543 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:23 crc kubenswrapper[4813]: I0320 16:24:23.869366 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:24 crc kubenswrapper[4813]: I0320 16:24:24.266546 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:24:24 crc kubenswrapper[4813]: E0320 16:24:24.266792 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:24:24 crc kubenswrapper[4813]: I0320 16:24:24.533892 4813 generic.go:334] "Generic (PLEG): container finished" podID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" containerID="7446e286e9891874d6503968b437f1847f285b7e0be45a3938f1a95f83a9d9eb" exitCode=0 Mar 20 16:24:24 crc kubenswrapper[4813]: I0320 16:24:24.533940 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" event={"ID":"f13fe6e6-3a88-4d63-9726-d851cad85ecf","Type":"ContainerDied","Data":"7446e286e9891874d6503968b437f1847f285b7e0be45a3938f1a95f83a9d9eb"} Mar 20 16:24:25 crc kubenswrapper[4813]: I0320 16:24:25.119772 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:25 crc kubenswrapper[4813]: I0320 16:24:25.873883 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.031980 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-db-sync-config-data\") pod \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032052 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f13fe6e6-3a88-4d63-9726-d851cad85ecf-etc-machine-id\") pod \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032089 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-config-data\") pod \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032129 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-combined-ca-bundle\") pod \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032179 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgzlj\" (UniqueName: \"kubernetes.io/projected/f13fe6e6-3a88-4d63-9726-d851cad85ecf-kube-api-access-hgzlj\") pod \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032221 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-scripts\") pod \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\" (UID: \"f13fe6e6-3a88-4d63-9726-d851cad85ecf\") " Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032282 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f13fe6e6-3a88-4d63-9726-d851cad85ecf-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f13fe6e6-3a88-4d63-9726-d851cad85ecf" (UID: "f13fe6e6-3a88-4d63-9726-d851cad85ecf"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.032688 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f13fe6e6-3a88-4d63-9726-d851cad85ecf-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.037512 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13fe6e6-3a88-4d63-9726-d851cad85ecf-kube-api-access-hgzlj" (OuterVolumeSpecName: "kube-api-access-hgzlj") pod "f13fe6e6-3a88-4d63-9726-d851cad85ecf" (UID: "f13fe6e6-3a88-4d63-9726-d851cad85ecf"). InnerVolumeSpecName "kube-api-access-hgzlj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.037660 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-scripts" (OuterVolumeSpecName: "scripts") pod "f13fe6e6-3a88-4d63-9726-d851cad85ecf" (UID: "f13fe6e6-3a88-4d63-9726-d851cad85ecf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.038636 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f13fe6e6-3a88-4d63-9726-d851cad85ecf" (UID: "f13fe6e6-3a88-4d63-9726-d851cad85ecf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.069273 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f13fe6e6-3a88-4d63-9726-d851cad85ecf" (UID: "f13fe6e6-3a88-4d63-9726-d851cad85ecf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.077895 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-config-data" (OuterVolumeSpecName: "config-data") pod "f13fe6e6-3a88-4d63-9726-d851cad85ecf" (UID: "f13fe6e6-3a88-4d63-9726-d851cad85ecf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.135379 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.135424 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.140554 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.140606 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgzlj\" (UniqueName: \"kubernetes.io/projected/f13fe6e6-3a88-4d63-9726-d851cad85ecf-kube-api-access-hgzlj\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.140628 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f13fe6e6-3a88-4d63-9726-d851cad85ecf-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.326378 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.554688 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" 
event={"ID":"f13fe6e6-3a88-4d63-9726-d851cad85ecf","Type":"ContainerDied","Data":"8e13243791feb3456d383e1306963d68d25e0f05474e77dd21605a2869a2c719"} Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.554746 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e13243791feb3456d383e1306963d68d25e0f05474e77dd21605a2869a2c719" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.554758 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-db-sync-h2pgr" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.889022 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:26 crc kubenswrapper[4813]: E0320 16:24:26.889396 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="330a3487-131e-4ff0-884b-cf8df2e6dc66" containerName="oc" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.889406 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="330a3487-131e-4ff0-884b-cf8df2e6dc66" containerName="oc" Mar 20 16:24:26 crc kubenswrapper[4813]: E0320 16:24:26.889420 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" containerName="cinder-db-sync" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.889426 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" containerName="cinder-db-sync" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.889589 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="330a3487-131e-4ff0-884b-cf8df2e6dc66" containerName="oc" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.889602 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" containerName="cinder-db-sync" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.890466 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.892813 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-scripts" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.893035 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-config-data" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.894340 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-cinder-dockercfg-b8jff" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.894511 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-backup-config-data" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.899565 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.901167 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.903258 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-scheduler-config-data" Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.917063 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:26 crc kubenswrapper[4813]: I0320 16:24:26.931816 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-lib-modules\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055510 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fkb5\" (UniqueName: \"kubernetes.io/projected/1039c04c-ce23-4343-ac3b-9dabc852a749-kube-api-access-4fkb5\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055636 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-cert-memcached-mtls\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-scripts\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-sys\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055929 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055962 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.055997 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056051 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-run\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056113 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056192 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056222 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-scripts\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056244 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056265 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-dev\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056283 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056297 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1039c04c-ce23-4343-ac3b-9dabc852a749-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056317 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngmrd\" (UniqueName: 
\"kubernetes.io/projected/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-kube-api-access-ngmrd\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056336 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056353 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056370 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056396 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056416 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.056434 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-cert-memcached-mtls\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.131148 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.132353 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.142841 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-api-config-data" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.151165 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.157819 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-sys\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.157868 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.157888 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.157911 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.157955 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-sys\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.157981 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158023 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158028 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-scripts\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158062 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: 
\"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-run\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158119 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-run\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158179 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158191 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158241 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158293 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-scripts\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158264 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158314 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-cert-memcached-mtls\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158386 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158449 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc 
kubenswrapper[4813]: I0320 16:24:27.158529 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-dev\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158475 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-dev\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158603 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.158622 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1039c04c-ce23-4343-ac3b-9dabc852a749-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159048 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngmrd\" (UniqueName: \"kubernetes.io/projected/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-kube-api-access-ngmrd\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159076 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l9jf\" (UniqueName: \"kubernetes.io/projected/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-kube-api-access-8l9jf\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159105 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159123 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159140 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159183 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159201 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-logs\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159234 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159256 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-cert-memcached-mtls\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159282 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159307 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159325 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data-custom\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159342 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-lib-modules\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159405 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fkb5\" (UniqueName: \"kubernetes.io/projected/1039c04c-ce23-4343-ac3b-9dabc852a749-kube-api-access-4fkb5\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159421 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-cert-memcached-mtls\") pod \"cinder-backup-0\" 
(UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159447 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-scripts\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.159916 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1039c04c-ce23-4343-ac3b-9dabc852a749-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.160225 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.160284 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.168096 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-lib-modules\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.174350 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-scripts\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.175982 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.176740 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.177167 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-scripts\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.177577 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.177645 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.179896 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-cert-memcached-mtls\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.179957 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.185150 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.198995 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-cert-memcached-mtls\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.224955 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fkb5\" (UniqueName: \"kubernetes.io/projected/1039c04c-ce23-4343-ac3b-9dabc852a749-kube-api-access-4fkb5\") pod \"cinder-scheduler-0\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.228836 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.229992 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngmrd\" (UniqueName: \"kubernetes.io/projected/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-kube-api-access-ngmrd\") pod \"cinder-backup-0\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270466 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270561 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270589 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data-custom\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270657 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270680 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-scripts\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270716 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-cert-memcached-mtls\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270752 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l9jf\" (UniqueName: \"kubernetes.io/projected/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-kube-api-access-8l9jf\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.270787 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-logs\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.271200 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-logs\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.282160 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.282246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.296242 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-cert-memcached-mtls\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.297237 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.311089 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data-custom\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.311954 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-scripts\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.319051 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l9jf\" (UniqueName: \"kubernetes.io/projected/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-kube-api-access-8l9jf\") pod \"cinder-api-0\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.451012 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.503660 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.510332 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:27 crc kubenswrapper[4813]: I0320 16:24:27.870505 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:28 crc kubenswrapper[4813]: I0320 16:24:28.013121 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:28 crc kubenswrapper[4813]: W0320 16:24:28.015022 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f67d5c1_6d0c_45f5_a421_27b2bdd8e65c.slice/crio-1911ee244de8ae9fc9f451a09c32fcbc27e9ed86a7759792cc5228df7bda8dcd WatchSource:0}: Error finding container 1911ee244de8ae9fc9f451a09c32fcbc27e9ed86a7759792cc5228df7bda8dcd: Status 404 returned error can't find the container with id 1911ee244de8ae9fc9f451a09c32fcbc27e9ed86a7759792cc5228df7bda8dcd Mar 20 16:24:28 crc kubenswrapper[4813]: I0320 16:24:28.117830 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:28 crc kubenswrapper[4813]: W0320 16:24:28.123940 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf79cdf3_80b6_4b5c_a108_753f3e53ea6b.slice/crio-0a55dfb25de6f176def0650024fd3195dd33173b767b0243a3981b1c1dbc3183 WatchSource:0}: Error finding container 0a55dfb25de6f176def0650024fd3195dd33173b767b0243a3981b1c1dbc3183: Status 404 returned error can't find the container with id 0a55dfb25de6f176def0650024fd3195dd33173b767b0243a3981b1c1dbc3183 Mar 20 16:24:28 crc kubenswrapper[4813]: I0320 16:24:28.639538 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b","Type":"ContainerStarted","Data":"0a55dfb25de6f176def0650024fd3195dd33173b767b0243a3981b1c1dbc3183"} Mar 20 16:24:28 crc kubenswrapper[4813]: I0320 16:24:28.640470 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"1039c04c-ce23-4343-ac3b-9dabc852a749","Type":"ContainerStarted","Data":"05803a00f3b57992d23043617572b6c99a62e5774093dd2d01d50fbdb4b6c1c8"} Mar 20 16:24:28 crc kubenswrapper[4813]: I0320 16:24:28.641465 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c","Type":"ContainerStarted","Data":"1911ee244de8ae9fc9f451a09c32fcbc27e9ed86a7759792cc5228df7bda8dcd"} Mar 20 16:24:28 crc kubenswrapper[4813]: I0320 16:24:28.768712 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:29 crc kubenswrapper[4813]: I0320 16:24:29.198822 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:29 crc kubenswrapper[4813]: I0320 16:24:29.683669 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c","Type":"ContainerStarted","Data":"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867"} Mar 20 16:24:29 crc kubenswrapper[4813]: I0320 16:24:29.686058 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" 
event={"ID":"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b","Type":"ContainerStarted","Data":"ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808"} Mar 20 16:24:29 crc kubenswrapper[4813]: I0320 16:24:29.690344 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"1039c04c-ce23-4343-ac3b-9dabc852a749","Type":"ContainerStarted","Data":"37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be"} Mar 20 16:24:29 crc kubenswrapper[4813]: I0320 16:24:29.952412 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.698435 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c","Type":"ContainerStarted","Data":"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4"} Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.698988 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.698734 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-api-0" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api-log" containerID="cri-o://bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867" gracePeriod=30 Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.699071 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-api-0" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api" containerID="cri-o://cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4" gracePeriod=30 Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.702854 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b","Type":"ContainerStarted","Data":"c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c"} Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.707952 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"1039c04c-ce23-4343-ac3b-9dabc852a749","Type":"ContainerStarted","Data":"aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4"} Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.722296 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-api-0" podStartSLOduration=3.722278804 podStartE2EDuration="3.722278804s" podCreationTimestamp="2026-03-20 16:24:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:24:30.720589539 +0000 UTC m=+2800.143292380" watchObservedRunningTime="2026-03-20 16:24:30.722278804 +0000 UTC m=+2800.144981645" Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.775843 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-scheduler-0" podStartSLOduration=3.972343764 podStartE2EDuration="4.775828889s" podCreationTimestamp="2026-03-20 16:24:26 +0000 UTC" firstStartedPulling="2026-03-20 16:24:27.872601411 +0000 UTC m=+2797.295304252" lastFinishedPulling="2026-03-20 16:24:28.676086536 +0000 UTC 
m=+2798.098789377" observedRunningTime="2026-03-20 16:24:30.751693638 +0000 UTC m=+2800.174396479" watchObservedRunningTime="2026-03-20 16:24:30.775828889 +0000 UTC m=+2800.198531730" Mar 20 16:24:30 crc kubenswrapper[4813]: I0320 16:24:30.779568 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-backup-0" podStartSLOduration=3.882054578 podStartE2EDuration="4.779559509s" podCreationTimestamp="2026-03-20 16:24:26 +0000 UTC" firstStartedPulling="2026-03-20 16:24:28.125795821 +0000 UTC m=+2797.548498662" lastFinishedPulling="2026-03-20 16:24:29.023300752 +0000 UTC m=+2798.446003593" observedRunningTime="2026-03-20 16:24:30.771129622 +0000 UTC m=+2800.193832473" watchObservedRunningTime="2026-03-20 16:24:30.779559509 +0000 UTC m=+2800.202262350" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.192204 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.361450 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454583 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-combined-ca-bundle\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454676 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-logs\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454699 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-scripts\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454758 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-etc-machine-id\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454779 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454795 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-cert-memcached-mtls\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454830 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l9jf\" (UniqueName: 
\"kubernetes.io/projected/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-kube-api-access-8l9jf\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454887 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data-custom\") pod \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\" (UID: \"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c\") " Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.454882 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.455205 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-logs" (OuterVolumeSpecName: "logs") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.456202 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.456980 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.473022 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-scripts" (OuterVolumeSpecName: "scripts") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.473599 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.473617 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-kube-api-access-8l9jf" (OuterVolumeSpecName: "kube-api-access-8l9jf") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "kube-api-access-8l9jf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.509201 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.515212 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data" (OuterVolumeSpecName: "config-data") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.559089 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.559112 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.559121 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l9jf\" (UniqueName: \"kubernetes.io/projected/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-kube-api-access-8l9jf\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.559131 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.559139 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.576727 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" (UID: "5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.660313 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717179 4813 generic.go:334] "Generic (PLEG): container finished" podID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerID="cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4" exitCode=0 Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717233 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717260 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c","Type":"ContainerDied","Data":"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4"} Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717306 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c","Type":"ContainerDied","Data":"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867"} Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717238 4813 generic.go:334] "Generic (PLEG): container finished" podID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerID="bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867" exitCode=143 Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717324 4813 scope.go:117] "RemoveContainer" containerID="cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.717391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c","Type":"ContainerDied","Data":"1911ee244de8ae9fc9f451a09c32fcbc27e9ed86a7759792cc5228df7bda8dcd"} Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.738805 4813 scope.go:117] "RemoveContainer" containerID="bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.761706 4813 scope.go:117] "RemoveContainer" containerID="cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4" Mar 20 16:24:31 crc kubenswrapper[4813]: E0320 16:24:31.763143 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4\": container with ID starting with cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4 not found: ID does not exist" containerID="cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.763268 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4"} err="failed to get container status \"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4\": rpc error: code = NotFound desc = could not find container \"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4\": container with ID starting with cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4 not found: ID does not exist" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.763392 4813 scope.go:117] "RemoveContainer" containerID="bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.767508 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:31 crc kubenswrapper[4813]: E0320 16:24:31.778729 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867\": container with ID starting with bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867 not found: ID 
does not exist" containerID="bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.778780 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867"} err="failed to get container status \"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867\": rpc error: code = NotFound desc = could not find container \"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867\": container with ID starting with bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867 not found: ID does not exist" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.778808 4813 scope.go:117] "RemoveContainer" containerID="cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.787796 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4"} err="failed to get container status \"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4\": rpc error: code = NotFound desc = could not find container \"cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4\": container with ID starting with cd97f9856508b259538160675571428e776e336c99a374950c21063544a3c2a4 not found: ID does not exist" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.788072 4813 scope.go:117] "RemoveContainer" containerID="bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.798986 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867"} err="failed to get container status \"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867\": rpc error: code = NotFound desc = could not find container \"bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867\": container with ID starting with bcfec34a64850794afe57400ad19ccddaf90d8b2fbda9974b6fa7df83d536867 not found: ID does not exist" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.805251 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.834614 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:31 crc kubenswrapper[4813]: E0320 16:24:31.835169 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.835233 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api" Mar 20 16:24:31 crc kubenswrapper[4813]: E0320 16:24:31.835289 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api-log" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.835337 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api-log" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.835617 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api-log" Mar 20 16:24:31 crc 
kubenswrapper[4813]: I0320 16:24:31.835698 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" containerName="cinder-api" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.836661 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.847667 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.858377 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-cinder-public-svc" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.858829 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-cinder-internal-svc" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.859130 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-api-config-data" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.967607 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-cert-memcached-mtls\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.967661 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.967796 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-scripts\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.967837 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/597daa3e-2492-4dce-87ca-58e26abac6da-etc-machine-id\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.967934 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597daa3e-2492-4dce-87ca-58e26abac6da-logs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.967988 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.968051 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-public-tls-certs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.968077 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data-custom\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.968113 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:31 crc kubenswrapper[4813]: I0320 16:24:31.968163 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvlgs\" (UniqueName: \"kubernetes.io/projected/597daa3e-2492-4dce-87ca-58e26abac6da-kube-api-access-vvlgs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.070080 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597daa3e-2492-4dce-87ca-58e26abac6da-logs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.070302 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.070388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-public-tls-certs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.070460 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597daa3e-2492-4dce-87ca-58e26abac6da-logs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.070573 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data-custom\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 
16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071186 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvlgs\" (UniqueName: \"kubernetes.io/projected/597daa3e-2492-4dce-87ca-58e26abac6da-kube-api-access-vvlgs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071285 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-cert-memcached-mtls\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071373 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071732 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-scripts\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071821 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/597daa3e-2492-4dce-87ca-58e26abac6da-etc-machine-id\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.071932 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/597daa3e-2492-4dce-87ca-58e26abac6da-etc-machine-id\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.074252 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.076325 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.076393 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data-custom\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.076706 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-combined-ca-bundle\") pod 
\"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.076713 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-cert-memcached-mtls\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.077019 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-public-tls-certs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.087860 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-scripts\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.088390 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvlgs\" (UniqueName: \"kubernetes.io/projected/597daa3e-2492-4dce-87ca-58e26abac6da-kube-api-access-vvlgs\") pod \"cinder-api-0\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.190759 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.230215 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.438846 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.511879 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.655595 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:32 crc kubenswrapper[4813]: W0320 16:24:32.657858 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod597daa3e_2492_4dce_87ca_58e26abac6da.slice/crio-5595f1f38a4a612286378f88e478201462c904ede7df01f2967f503c693a002f WatchSource:0}: Error finding container 5595f1f38a4a612286378f88e478201462c904ede7df01f2967f503c693a002f: Status 404 returned error can't find the container with id 5595f1f38a4a612286378f88e478201462c904ede7df01f2967f503c693a002f Mar 20 16:24:32 crc kubenswrapper[4813]: I0320 16:24:32.737453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"597daa3e-2492-4dce-87ca-58e26abac6da","Type":"ContainerStarted","Data":"5595f1f38a4a612286378f88e478201462c904ede7df01f2967f503c693a002f"} Mar 20 16:24:33 crc kubenswrapper[4813]: I0320 16:24:33.275869 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c" 
path="/var/lib/kubelet/pods/5f67d5c1-6d0c-45f5-a421-27b2bdd8e65c/volumes" Mar 20 16:24:33 crc kubenswrapper[4813]: I0320 16:24:33.644341 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:33 crc kubenswrapper[4813]: I0320 16:24:33.751323 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"597daa3e-2492-4dce-87ca-58e26abac6da","Type":"ContainerStarted","Data":"24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78"} Mar 20 16:24:34 crc kubenswrapper[4813]: I0320 16:24:34.773108 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"597daa3e-2492-4dce-87ca-58e26abac6da","Type":"ContainerStarted","Data":"bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632"} Mar 20 16:24:34 crc kubenswrapper[4813]: I0320 16:24:34.773567 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:34 crc kubenswrapper[4813]: I0320 16:24:34.803918 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-api-0" podStartSLOduration=3.803900629 podStartE2EDuration="3.803900629s" podCreationTimestamp="2026-03-20 16:24:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:24:34.793096697 +0000 UTC m=+2804.215799538" watchObservedRunningTime="2026-03-20 16:24:34.803900629 +0000 UTC m=+2804.226603470" Mar 20 16:24:34 crc kubenswrapper[4813]: I0320 16:24:34.894725 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:35 crc kubenswrapper[4813]: I0320 16:24:35.266458 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:24:35 crc kubenswrapper[4813]: I0320 16:24:35.789643 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"8ae1b78d7363e1a932bd9007ead346614378cdac238d4e7161c7374b208edff8"} Mar 20 16:24:36 crc kubenswrapper[4813]: I0320 16:24:36.120725 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.366624 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.438345 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.479200 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.804584 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.809926 4813 
kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-scheduler-0" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="cinder-scheduler" containerID="cri-o://37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be" gracePeriod=30 Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.809944 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-scheduler-0" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="probe" containerID="cri-o://aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4" gracePeriod=30 Mar 20 16:24:37 crc kubenswrapper[4813]: I0320 16:24:37.860722 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:38 crc kubenswrapper[4813]: I0320 16:24:38.562957 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:38 crc kubenswrapper[4813]: I0320 16:24:38.824647 4813 generic.go:334] "Generic (PLEG): container finished" podID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerID="aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4" exitCode=0 Mar 20 16:24:38 crc kubenswrapper[4813]: I0320 16:24:38.824727 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"1039c04c-ce23-4343-ac3b-9dabc852a749","Type":"ContainerDied","Data":"aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4"} Mar 20 16:24:38 crc kubenswrapper[4813]: I0320 16:24:38.825035 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-backup-0" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="cinder-backup" containerID="cri-o://ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808" gracePeriod=30 Mar 20 16:24:38 crc kubenswrapper[4813]: I0320 16:24:38.825231 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-backup-0" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="probe" containerID="cri-o://c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c" gracePeriod=30 Mar 20 16:24:39 crc kubenswrapper[4813]: I0320 16:24:39.101683 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:39 crc kubenswrapper[4813]: I0320 16:24:39.101928 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="e4987895-e3f1-4081-bee3-e2dbacf04dd1" containerName="watcher-decision-engine" containerID="cri-o://7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19" gracePeriod=30 Mar 20 16:24:39 crc kubenswrapper[4813]: I0320 16:24:39.810800 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:39 crc kubenswrapper[4813]: I0320 16:24:39.835273 4813 generic.go:334] "Generic (PLEG): container finished" podID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerID="c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c" exitCode=0 Mar 20 16:24:39 crc kubenswrapper[4813]: I0320 16:24:39.835314 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b","Type":"ContainerDied","Data":"c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c"} Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.211103 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.211808 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="sg-core" containerID="cri-o://7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953" gracePeriod=30 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.211848 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-notification-agent" containerID="cri-o://3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a" gracePeriod=30 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.212055 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="proxy-httpd" containerID="cri-o://ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd" gracePeriod=30 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.214676 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-central-agent" containerID="cri-o://e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8" gracePeriod=30 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.847445 4813 generic.go:334] "Generic (PLEG): container finished" podID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerID="ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd" exitCode=0 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.848185 4813 generic.go:334] "Generic (PLEG): container finished" podID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerID="7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953" exitCode=2 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.848208 4813 generic.go:334] "Generic (PLEG): container finished" podID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerID="e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8" exitCode=0 Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.847522 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerDied","Data":"ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd"} Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.848262 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerDied","Data":"7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953"} Mar 20 16:24:40 crc kubenswrapper[4813]: I0320 16:24:40.848290 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerDied","Data":"e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8"} Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.012447 
4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.375946 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xg78b"] Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.378007 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.387916 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xg78b"] Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.538455 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr89m\" (UniqueName: \"kubernetes.io/projected/787d2c28-25c5-44a4-9b44-c977489a30e9-kube-api-access-cr89m\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.538800 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-catalog-content\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.538897 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-utilities\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.640062 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr89m\" (UniqueName: \"kubernetes.io/projected/787d2c28-25c5-44a4-9b44-c977489a30e9-kube-api-access-cr89m\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.640117 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-catalog-content\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.640203 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-utilities\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.640678 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-utilities\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc 
kubenswrapper[4813]: I0320 16:24:41.640764 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-catalog-content\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.660551 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr89m\" (UniqueName: \"kubernetes.io/projected/787d2c28-25c5-44a4-9b44-c977489a30e9-kube-api-access-cr89m\") pod \"redhat-marketplace-xg78b\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.701078 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.831917 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.895948 4813 generic.go:334] "Generic (PLEG): container finished" podID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerID="3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a" exitCode=0 Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.896279 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerDied","Data":"3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a"} Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.896306 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"a7d010c8-b2f7-4e64-afe3-0abd085364a4","Type":"ContainerDied","Data":"98cc9a8acfbe4b453d65740e04fd91ef565a0ca661432a341d4a0b2ab772a869"} Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.896338 4813 scope.go:117] "RemoveContainer" containerID="ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.896532 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950031 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-combined-ca-bundle\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950123 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m64x6\" (UniqueName: \"kubernetes.io/projected/a7d010c8-b2f7-4e64-afe3-0abd085364a4-kube-api-access-m64x6\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950239 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-ceilometer-tls-certs\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950280 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-scripts\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950304 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-sg-core-conf-yaml\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950332 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-log-httpd\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950419 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-config-data\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.950445 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-run-httpd\") pod \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\" (UID: \"a7d010c8-b2f7-4e64-afe3-0abd085364a4\") " Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.951390 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.951893 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.955708 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-scripts" (OuterVolumeSpecName: "scripts") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.969714 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7d010c8-b2f7-4e64-afe3-0abd085364a4-kube-api-access-m64x6" (OuterVolumeSpecName: "kube-api-access-m64x6") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "kube-api-access-m64x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:41 crc kubenswrapper[4813]: I0320 16:24:41.994114 4813 scope.go:117] "RemoveContainer" containerID="7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.016829 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.018259 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.023643 4813 scope.go:117] "RemoveContainer" containerID="3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.052744 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.052780 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.052793 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.052810 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7d010c8-b2f7-4e64-afe3-0abd085364a4-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.052824 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m64x6\" (UniqueName: \"kubernetes.io/projected/a7d010c8-b2f7-4e64-afe3-0abd085364a4-kube-api-access-m64x6\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.052835 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.062171 4813 scope.go:117] "RemoveContainer" containerID="e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.086654 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.129221 4813 scope.go:117] "RemoveContainer" containerID="ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.131729 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd\": container with ID starting with ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd not found: ID does not exist" containerID="ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.131773 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd"} err="failed to get container status \"ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd\": rpc error: code = NotFound desc = could not find container \"ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd\": container with ID starting with ee02d6faa7bac7c983e7d803d37e6126ef4329049f30db894d0d4ed38c5a0fdd not found: ID does not exist" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.131801 4813 scope.go:117] "RemoveContainer" containerID="7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.134596 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-config-data" (OuterVolumeSpecName: "config-data") pod "a7d010c8-b2f7-4e64-afe3-0abd085364a4" (UID: "a7d010c8-b2f7-4e64-afe3-0abd085364a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.137642 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953\": container with ID starting with 7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953 not found: ID does not exist" containerID="7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.137678 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953"} err="failed to get container status \"7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953\": rpc error: code = NotFound desc = could not find container \"7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953\": container with ID starting with 7ccfb429032654cf195aeafbfdde6dddffed59b6bc79d2bc4a14b74f4d520953 not found: ID does not exist" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.137701 4813 scope.go:117] "RemoveContainer" containerID="3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.140540 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a\": container with ID starting with 3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a not found: ID does not exist" containerID="3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.140567 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a"} err="failed to get container status \"3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a\": rpc error: code = NotFound desc = could not find container \"3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a\": container with ID starting with 3bb6c7260c59cb55349b6c8e6c29cccb7ba7bddf5d9cb27986f2fbba6c1ffd7a not found: ID does not exist" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.140589 4813 scope.go:117] "RemoveContainer" containerID="e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.141060 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8\": container with ID starting with e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8 not found: ID does not exist" containerID="e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.141077 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8"} err="failed to get container status \"e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8\": rpc error: code = NotFound desc = could not find container \"e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8\": container with ID starting with e5175aa0c657449241d0103bf03af7688cabf7b5c28fb3fd8ec27e3c588b17f8 
not found: ID does not exist" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.154296 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.154545 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7d010c8-b2f7-4e64-afe3-0abd085364a4-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.233714 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.269605 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.283381 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308174 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.308512 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-notification-agent" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308526 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-notification-agent" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.308540 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="sg-core" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308546 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="sg-core" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.308556 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-central-agent" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308563 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-central-agent" Mar 20 16:24:42 crc kubenswrapper[4813]: E0320 16:24:42.308579 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="proxy-httpd" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308585 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="proxy-httpd" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308911 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-central-agent" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308934 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="ceilometer-notification-agent" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.308951 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="proxy-httpd" Mar 20 16:24:42 crc 
kubenswrapper[4813]: I0320 16:24:42.308959 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" containerName="sg-core" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.312108 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.315825 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.315996 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.316150 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.329451 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.345983 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xg78b"] Mar 20 16:24:42 crc kubenswrapper[4813]: W0320 16:24:42.351739 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod787d2c28_25c5_44a4_9b44_c977489a30e9.slice/crio-c9255e451143c5be74a2715b73b6c6bd2e48af2b46615836978b3727f90675b2 WatchSource:0}: Error finding container c9255e451143c5be74a2715b73b6c6bd2e48af2b46615836978b3727f90675b2: Status 404 returned error can't find the container with id c9255e451143c5be74a2715b73b6c6bd2e48af2b46615836978b3727f90675b2 Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.378389 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.462869 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-log-httpd\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.462951 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-config-data\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.463030 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.463053 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.463085 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.463117 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-scripts\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.463142 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-run-httpd\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.463174 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpkzn\" (UniqueName: \"kubernetes.io/projected/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-kube-api-access-rpkzn\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585121 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1039c04c-ce23-4343-ac3b-9dabc852a749-etc-machine-id\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc 
kubenswrapper[4813]: I0320 16:24:42.585200 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data-custom\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585229 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-scripts\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585271 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585294 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-combined-ca-bundle\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585431 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fkb5\" (UniqueName: \"kubernetes.io/projected/1039c04c-ce23-4343-ac3b-9dabc852a749-kube-api-access-4fkb5\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585471 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-cert-memcached-mtls\") pod \"1039c04c-ce23-4343-ac3b-9dabc852a749\" (UID: \"1039c04c-ce23-4343-ac3b-9dabc852a749\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.585986 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpkzn\" (UniqueName: \"kubernetes.io/projected/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-kube-api-access-rpkzn\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586045 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-log-httpd\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-config-data\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586173 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " 
pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586194 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586226 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586228 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1039c04c-ce23-4343-ac3b-9dabc852a749-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586260 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-scripts\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586288 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-run-httpd\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586357 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1039c04c-ce23-4343-ac3b-9dabc852a749-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586697 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-log-httpd\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.586781 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-run-httpd\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.594418 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-scripts" (OuterVolumeSpecName: "scripts") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.599037 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.605151 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.605280 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.607395 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-scripts\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.620911 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.621325 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-config-data\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.621903 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1039c04c-ce23-4343-ac3b-9dabc852a749-kube-api-access-4fkb5" (OuterVolumeSpecName: "kube-api-access-4fkb5") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "kube-api-access-4fkb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.631721 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpkzn\" (UniqueName: \"kubernetes.io/projected/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-kube-api-access-rpkzn\") pod \"ceilometer-0\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.670909 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.687747 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.687785 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.687800 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fkb5\" (UniqueName: \"kubernetes.io/projected/1039c04c-ce23-4343-ac3b-9dabc852a749-kube-api-access-4fkb5\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.691719 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.733701 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data" (OuterVolumeSpecName: "config-data") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.789674 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.789704 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.856905 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.872652 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "1039c04c-ce23-4343-ac3b-9dabc852a749" (UID: "1039c04c-ce23-4343-ac3b-9dabc852a749"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.892908 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-combined-ca-bundle\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.892966 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-lib-cinder\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.892986 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-scripts\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893019 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-cinder\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893044 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-cert-memcached-mtls\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893061 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-sys\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-iscsi\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893098 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-dev\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngmrd\" (UniqueName: \"kubernetes.io/projected/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-kube-api-access-ngmrd\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893202 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 
16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893240 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-run\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893261 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-lib-modules\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893285 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-brick\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893299 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data-custom\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893349 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-nvme\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893370 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-machine-id\") pod \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\" (UID: \"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b\") " Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893576 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-dev" (OuterVolumeSpecName: "dev") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.893991 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-cinder" (OuterVolumeSpecName: "var-locks-cinder") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "var-locks-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894050 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-lib-cinder" (OuterVolumeSpecName: "var-lib-cinder") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "var-lib-cinder". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894138 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894529 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1039c04c-ce23-4343-ac3b-9dabc852a749-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894547 4813 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-lib-modules\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894556 4813 reconciler_common.go:293] "Volume detached for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-lib-cinder\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894566 4813 reconciler_common.go:293] "Volume detached for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-cinder\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894574 4813 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-dev\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894631 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.894659 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.898151 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.898217 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "etc-nvme". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.898241 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-run" (OuterVolumeSpecName: "run") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.898262 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-sys" (OuterVolumeSpecName: "sys") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.898851 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-scripts" (OuterVolumeSpecName: "scripts") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.898893 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.899422 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-kube-api-access-ngmrd" (OuterVolumeSpecName: "kube-api-access-ngmrd") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "kube-api-access-ngmrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.929095 4813 generic.go:334] "Generic (PLEG): container finished" podID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerID="ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808" exitCode=0 Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.929178 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b","Type":"ContainerDied","Data":"ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808"} Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.929211 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"bf79cdf3-80b6-4b5c-a108-753f3e53ea6b","Type":"ContainerDied","Data":"0a55dfb25de6f176def0650024fd3195dd33173b767b0243a3981b1c1dbc3183"} Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.929232 4813 scope.go:117] "RemoveContainer" containerID="c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.929391 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.954417 4813 generic.go:334] "Generic (PLEG): container finished" podID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerID="37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be" exitCode=0 Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.954544 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"1039c04c-ce23-4343-ac3b-9dabc852a749","Type":"ContainerDied","Data":"37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be"} Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.954577 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"1039c04c-ce23-4343-ac3b-9dabc852a749","Type":"ContainerDied","Data":"05803a00f3b57992d23043617572b6c99a62e5774093dd2d01d50fbdb4b6c1c8"} Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.954680 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.964663 4813 scope.go:117] "RemoveContainer" containerID="ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.965074 4813 generic.go:334] "Generic (PLEG): container finished" podID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerID="56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d" exitCode=0 Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.965114 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerDied","Data":"56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d"} Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.965138 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerStarted","Data":"c9255e451143c5be74a2715b73b6c6bd2e48af2b46615836978b3727f90675b2"} Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.990586 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996180 4813 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-run\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996209 4813 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-var-locks-brick\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996219 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996227 4813 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-nvme\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996236 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996244 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996252 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996263 4813 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-sys\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996273 4813 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-etc-iscsi\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:42 crc kubenswrapper[4813]: I0320 16:24:42.996280 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngmrd\" (UniqueName: \"kubernetes.io/projected/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-kube-api-access-ngmrd\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.012846 4813 scope.go:117] "RemoveContainer" containerID="c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c" Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.013395 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c\": container with ID starting with c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c not found: ID does not exist" containerID="c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.013440 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c"} 
err="failed to get container status \"c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c\": rpc error: code = NotFound desc = could not find container \"c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c\": container with ID starting with c8c9adc4c400a2f80b7f24326b55139bb48b90ea2a8e863dc39873d21f7d1b8c not found: ID does not exist" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.013467 4813 scope.go:117] "RemoveContainer" containerID="ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808" Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.013746 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808\": container with ID starting with ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808 not found: ID does not exist" containerID="ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.013764 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808"} err="failed to get container status \"ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808\": rpc error: code = NotFound desc = could not find container \"ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808\": container with ID starting with ce9d09b68f05cd84aa20ad3560b8c4f654a7419e9ee24a774e771f133d68e808 not found: ID does not exist" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.013778 4813 scope.go:117] "RemoveContainer" containerID="aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.072841 4813 scope.go:117] "RemoveContainer" containerID="37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.078264 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.107524 4813 scope.go:117] "RemoveContainer" containerID="aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.110566 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.112126 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4\": container with ID starting with aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4 not found: ID does not exist" containerID="aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.112177 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4"} err="failed to get container status \"aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4\": rpc error: code = NotFound desc = could not find container \"aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4\": container with ID starting with aafb60b8592901f710d23b0417bc472e523aff38847fae9538382f276c6ae7c4 not found: ID does not exist" Mar 20 16:24:43 crc 
kubenswrapper[4813]: I0320 16:24:43.112207 4813 scope.go:117] "RemoveContainer" containerID="37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be" Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.115769 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be\": container with ID starting with 37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be not found: ID does not exist" containerID="37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.115819 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be"} err="failed to get container status \"37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be\": rpc error: code = NotFound desc = could not find container \"37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be\": container with ID starting with 37f16eabb62ffd9ed2e90f46febd02d86092ae652024c73929b983ac6295c9be not found: ID does not exist" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.117633 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.119918 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="cinder-backup" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.119940 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="cinder-backup" Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.119958 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="probe" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.119964 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="probe" Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.119980 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="probe" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.119986 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="probe" Mar 20 16:24:43 crc kubenswrapper[4813]: E0320 16:24:43.120007 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="cinder-scheduler" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.120013 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="cinder-scheduler" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.120172 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="probe" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.120186 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="probe" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.120199 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" containerName="cinder-scheduler" Mar 20 16:24:43 crc 
kubenswrapper[4813]: I0320 16:24:43.120215 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" containerName="cinder-backup" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.121066 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.124885 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-scheduler-config-data" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.128237 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.135320 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data" (OuterVolumeSpecName: "config-data") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.168506 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" (UID: "bf79cdf3-80b6-4b5c-a108-753f3e53ea6b"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199125 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199197 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199357 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmqjl\" (UniqueName: \"kubernetes.io/projected/ffeb7950-784d-45db-8b4e-2e0ae2e71271-kube-api-access-cmqjl\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199466 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-cert-memcached-mtls\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199555 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb7950-784d-45db-8b4e-2e0ae2e71271-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: 
\"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199686 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199715 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199804 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.199816 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.200238 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.279947 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1039c04c-ce23-4343-ac3b-9dabc852a749" path="/var/lib/kubelet/pods/1039c04c-ce23-4343-ac3b-9dabc852a749/volumes" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.280847 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7d010c8-b2f7-4e64-afe3-0abd085364a4" path="/var/lib/kubelet/pods/a7d010c8-b2f7-4e64-afe3-0abd085364a4/volumes" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-cert-memcached-mtls\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301529 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb7950-784d-45db-8b4e-2e0ae2e71271-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301625 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301652 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301721 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301795 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.301833 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmqjl\" (UniqueName: \"kubernetes.io/projected/ffeb7950-784d-45db-8b4e-2e0ae2e71271-kube-api-access-cmqjl\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.305535 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb7950-784d-45db-8b4e-2e0ae2e71271-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.309439 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-cert-memcached-mtls\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.313038 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.314012 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.325270 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.325958 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmqjl\" (UniqueName: \"kubernetes.io/projected/ffeb7950-784d-45db-8b4e-2e0ae2e71271-kube-api-access-cmqjl\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.330362 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.334168 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.346123 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.368642 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.370493 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.375814 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cinder-backup-config-data" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.390806 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_e4987895-e3f1-4081-bee3-e2dbacf04dd1/watcher-decision-engine/0.log" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.399646 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403455 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403535 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403566 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403589 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rdxc\" (UniqueName: \"kubernetes.io/projected/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-kube-api-access-9rdxc\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403612 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-nvme\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 
crc kubenswrapper[4813]: I0320 16:24:43.403685 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403706 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-sys\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403734 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-run\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403755 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data-custom\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403778 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403822 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-lib-modules\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403844 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-dev\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403864 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403905 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-scripts\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403928 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-cert-memcached-mtls\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.403944 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.439047 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506404 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-run\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506521 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data-custom\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506557 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506636 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-lib-modules\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506689 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-dev\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506798 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-scripts\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.506861 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-cert-memcached-mtls\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.514565 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.514681 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.514819 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.514944 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.514973 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rdxc\" (UniqueName: \"kubernetes.io/projected/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-kube-api-access-9rdxc\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.514995 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-nvme\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.515144 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.515202 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-sys\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.516948 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-sys\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " 
pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517268 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-nvme\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517287 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517332 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517357 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517379 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-lib-modules\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517399 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-run\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517802 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517836 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-dev\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.517885 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.525338 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-scripts\") pod \"cinder-backup-0\" (UID: 
\"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.526115 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-cert-memcached-mtls\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.526639 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.527956 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data-custom\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.540820 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.543887 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rdxc\" (UniqueName: \"kubernetes.io/projected/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-kube-api-access-9rdxc\") pod \"cinder-backup-0\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.729070 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.782905 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.933347 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-custom-prometheus-ca\") pod \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.934374 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-cert-memcached-mtls\") pod \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.939430 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4987895-e3f1-4081-bee3-e2dbacf04dd1-logs" (OuterVolumeSpecName: "logs") pod "e4987895-e3f1-4081-bee3-e2dbacf04dd1" (UID: "e4987895-e3f1-4081-bee3-e2dbacf04dd1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.939839 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4987895-e3f1-4081-bee3-e2dbacf04dd1-logs\") pod \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.939888 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvkkw\" (UniqueName: \"kubernetes.io/projected/e4987895-e3f1-4081-bee3-e2dbacf04dd1-kube-api-access-wvkkw\") pod \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.940027 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-config-data\") pod \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.940110 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-combined-ca-bundle\") pod \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\" (UID: \"e4987895-e3f1-4081-bee3-e2dbacf04dd1\") " Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.940847 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4987895-e3f1-4081-bee3-e2dbacf04dd1-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.948697 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4987895-e3f1-4081-bee3-e2dbacf04dd1-kube-api-access-wvkkw" (OuterVolumeSpecName: "kube-api-access-wvkkw") pod "e4987895-e3f1-4081-bee3-e2dbacf04dd1" (UID: "e4987895-e3f1-4081-bee3-e2dbacf04dd1"). InnerVolumeSpecName "kube-api-access-wvkkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.990910 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:43 crc kubenswrapper[4813]: I0320 16:24:43.993175 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "e4987895-e3f1-4081-bee3-e2dbacf04dd1" (UID: "e4987895-e3f1-4081-bee3-e2dbacf04dd1"). InnerVolumeSpecName "custom-prometheus-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.001892 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerStarted","Data":"24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610"} Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.008292 4813 generic.go:334] "Generic (PLEG): container finished" podID="e4987895-e3f1-4081-bee3-e2dbacf04dd1" containerID="7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19" exitCode=0 Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.008352 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"e4987895-e3f1-4081-bee3-e2dbacf04dd1","Type":"ContainerDied","Data":"7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19"} Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.008376 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"e4987895-e3f1-4081-bee3-e2dbacf04dd1","Type":"ContainerDied","Data":"d012d1b2c19150907fce6c6e373ceab670a6063ecbfbd0799d39cf91fa2c65be"} Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.008391 4813 scope.go:117] "RemoveContainer" containerID="7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.008491 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.012399 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerStarted","Data":"40dee85b2b152162fb750d649c9eece5b5b6a852f2f04d9dcbc4d965f5914548"} Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.012435 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerStarted","Data":"9493f871747725b2e9a1c2a63fffb432a2a51253c6f50125abf7d0712b5e2456"} Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.012756 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4987895-e3f1-4081-bee3-e2dbacf04dd1" (UID: "e4987895-e3f1-4081-bee3-e2dbacf04dd1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.042005 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.042034 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.042043 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvkkw\" (UniqueName: \"kubernetes.io/projected/e4987895-e3f1-4081-bee3-e2dbacf04dd1-kube-api-access-wvkkw\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.058634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "e4987895-e3f1-4081-bee3-e2dbacf04dd1" (UID: "e4987895-e3f1-4081-bee3-e2dbacf04dd1"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.058784 4813 scope.go:117] "RemoveContainer" containerID="7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19" Mar 20 16:24:44 crc kubenswrapper[4813]: E0320 16:24:44.059351 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19\": container with ID starting with 7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19 not found: ID does not exist" containerID="7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.059396 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19"} err="failed to get container status \"7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19\": rpc error: code = NotFound desc = could not find container \"7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19\": container with ID starting with 7909eaee77d8880656053c5c2f670170f2d90e50f52482db24c2f30a63a91c19 not found: ID does not exist" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.064955 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-config-data" (OuterVolumeSpecName: "config-data") pod "e4987895-e3f1-4081-bee3-e2dbacf04dd1" (UID: "e4987895-e3f1-4081-bee3-e2dbacf04dd1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.143875 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.143910 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4987895-e3f1-4081-bee3-e2dbacf04dd1-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.254654 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:44 crc kubenswrapper[4813]: W0320 16:24:44.261584 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e791a9d_77b9_4d26_9260_62e6d9ce98a4.slice/crio-8ba105902c2cff36fb3eb69bb8359597196025e0ca406e8ae9a09e74bb6c4131 WatchSource:0}: Error finding container 8ba105902c2cff36fb3eb69bb8359597196025e0ca406e8ae9a09e74bb6c4131: Status 404 returned error can't find the container with id 8ba105902c2cff36fb3eb69bb8359597196025e0ca406e8ae9a09e74bb6c4131 Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.347473 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.353986 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.363590 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:44 crc kubenswrapper[4813]: E0320 16:24:44.363896 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4987895-e3f1-4081-bee3-e2dbacf04dd1" containerName="watcher-decision-engine" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.363913 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4987895-e3f1-4081-bee3-e2dbacf04dd1" containerName="watcher-decision-engine" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.364094 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4987895-e3f1-4081-bee3-e2dbacf04dd1" containerName="watcher-decision-engine" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.364635 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.367516 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.400605 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.452900 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.453197 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.453253 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.453287 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnksz\" (UniqueName: \"kubernetes.io/projected/954ddeb4-a252-4db6-93c8-7d7155e06e1b-kube-api-access-xnksz\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.453349 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954ddeb4-a252-4db6-93c8-7d7155e06e1b-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.453381 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.557695 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.563690 4813 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-xnksz\" (UniqueName: \"kubernetes.io/projected/954ddeb4-a252-4db6-93c8-7d7155e06e1b-kube-api-access-xnksz\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.567205 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954ddeb4-a252-4db6-93c8-7d7155e06e1b-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.567682 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954ddeb4-a252-4db6-93c8-7d7155e06e1b-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.567943 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.568390 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.569249 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.571111 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.571807 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.572801 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: 
I0320 16:24:44.572857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.593943 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnksz\" (UniqueName: \"kubernetes.io/projected/954ddeb4-a252-4db6-93c8-7d7155e06e1b-kube-api-access-xnksz\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.712142 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:44 crc kubenswrapper[4813]: I0320 16:24:44.735217 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.080545 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerStarted","Data":"bcb9356ba88adf134d5a8943252d6b298172f6ebad76e905a9061bf8f3ad3893"} Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.100718 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"ffeb7950-784d-45db-8b4e-2e0ae2e71271","Type":"ContainerStarted","Data":"d70ab795a16bdc9607f109d7c2545a43e437f4a0ffaae184b7d3b38dc90618b5"} Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.134058 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"9e791a9d-77b9-4d26-9260-62e6d9ce98a4","Type":"ContainerStarted","Data":"bd3aba632b9742afa3b3308b1c657aa27a433d6e7d353bda7b422176bf2ba4f9"} Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.134111 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"9e791a9d-77b9-4d26-9260-62e6d9ce98a4","Type":"ContainerStarted","Data":"8ba105902c2cff36fb3eb69bb8359597196025e0ca406e8ae9a09e74bb6c4131"} Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.189112 4813 generic.go:334] "Generic (PLEG): container finished" podID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerID="24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610" exitCode=0 Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.189151 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerDied","Data":"24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610"} Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.287019 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf79cdf3-80b6-4b5c-a108-753f3e53ea6b" path="/var/lib/kubelet/pods/bf79cdf3-80b6-4b5c-a108-753f3e53ea6b/volumes" Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.287655 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4987895-e3f1-4081-bee3-e2dbacf04dd1" path="/var/lib/kubelet/pods/e4987895-e3f1-4081-bee3-e2dbacf04dd1/volumes" Mar 20 16:24:45 crc kubenswrapper[4813]: I0320 16:24:45.473259 4813 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:45 crc kubenswrapper[4813]: W0320 16:24:45.516610 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod954ddeb4_a252_4db6_93c8_7d7155e06e1b.slice/crio-7b9e83d49d97f9d910bce6a92685a1a3fb566920c116ccbac79e0b9c78e5fbf9 WatchSource:0}: Error finding container 7b9e83d49d97f9d910bce6a92685a1a3fb566920c116ccbac79e0b9c78e5fbf9: Status 404 returned error can't find the container with id 7b9e83d49d97f9d910bce6a92685a1a3fb566920c116ccbac79e0b9c78e5fbf9 Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.210802 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"954ddeb4-a252-4db6-93c8-7d7155e06e1b","Type":"ContainerStarted","Data":"880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.211312 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"954ddeb4-a252-4db6-93c8-7d7155e06e1b","Type":"ContainerStarted","Data":"7b9e83d49d97f9d910bce6a92685a1a3fb566920c116ccbac79e0b9c78e5fbf9"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.215747 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerStarted","Data":"eb2f3499d5713e3013e2ce1fd2dba305e1edfc34b4689c8c5854d53b04d0b9db"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.224878 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"ffeb7950-784d-45db-8b4e-2e0ae2e71271","Type":"ContainerStarted","Data":"4375ebef066e2c8c3e2c2e674c35ac5a2ef51e6847f83ad11da451d6408355e3"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.224920 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"ffeb7950-784d-45db-8b4e-2e0ae2e71271","Type":"ContainerStarted","Data":"8f62f23ab274838c98c5e319855966d412a6d36a4de29558a83c5f25df86f610"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.232608 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"9e791a9d-77b9-4d26-9260-62e6d9ce98a4","Type":"ContainerStarted","Data":"bafa35caf22b8c1033ee0871281435a9e8f3b43c17fa3eb7cf5ef68011dfd6b4"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.235562 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.235536617 podStartE2EDuration="2.235536617s" podCreationTimestamp="2026-03-20 16:24:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:24:46.229871064 +0000 UTC m=+2815.652573905" watchObservedRunningTime="2026-03-20 16:24:46.235536617 +0000 UTC m=+2815.658239458" Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.237343 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerStarted","Data":"820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581"} Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.261632 4813 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-scheduler-0" podStartSLOduration=3.26161588 podStartE2EDuration="3.26161588s" podCreationTimestamp="2026-03-20 16:24:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:24:46.25231685 +0000 UTC m=+2815.675019691" watchObservedRunningTime="2026-03-20 16:24:46.26161588 +0000 UTC m=+2815.684318721" Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.273470 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xg78b" podStartSLOduration=2.369876814 podStartE2EDuration="5.27345642s" podCreationTimestamp="2026-03-20 16:24:41 +0000 UTC" firstStartedPulling="2026-03-20 16:24:42.96864341 +0000 UTC m=+2812.391346251" lastFinishedPulling="2026-03-20 16:24:45.872223016 +0000 UTC m=+2815.294925857" observedRunningTime="2026-03-20 16:24:46.27233291 +0000 UTC m=+2815.695035751" watchObservedRunningTime="2026-03-20 16:24:46.27345642 +0000 UTC m=+2815.696159261" Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.312414 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinder-backup-0" podStartSLOduration=3.31240071 podStartE2EDuration="3.31240071s" podCreationTimestamp="2026-03-20 16:24:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:24:46.310806197 +0000 UTC m=+2815.733509038" watchObservedRunningTime="2026-03-20 16:24:46.31240071 +0000 UTC m=+2815.735103541" Mar 20 16:24:46 crc kubenswrapper[4813]: I0320 16:24:46.928447 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:48 crc kubenswrapper[4813]: I0320 16:24:48.067595 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:48 crc kubenswrapper[4813]: I0320 16:24:48.259439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerStarted","Data":"3f2ed583b0f209368d294c88f746f04cdfd9b951b9cfe211bbffe99978cbfa8d"} Mar 20 16:24:48 crc kubenswrapper[4813]: I0320 16:24:48.260766 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:24:48 crc kubenswrapper[4813]: I0320 16:24:48.292903 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.792690729 podStartE2EDuration="6.292879266s" podCreationTimestamp="2026-03-20 16:24:42 +0000 UTC" firstStartedPulling="2026-03-20 16:24:43.19032843 +0000 UTC m=+2812.613031271" lastFinishedPulling="2026-03-20 16:24:47.690516967 +0000 UTC m=+2817.113219808" observedRunningTime="2026-03-20 16:24:48.282783004 +0000 UTC m=+2817.705485845" watchObservedRunningTime="2026-03-20 16:24:48.292879266 +0000 UTC m=+2817.715582107" Mar 20 16:24:48 crc kubenswrapper[4813]: I0320 16:24:48.440142 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:48 crc kubenswrapper[4813]: I0320 16:24:48.729857 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:49 crc kubenswrapper[4813]: I0320 16:24:49.297276 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:50 crc kubenswrapper[4813]: I0320 16:24:50.493061 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:51 crc kubenswrapper[4813]: I0320 16:24:51.701819 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:51 crc kubenswrapper[4813]: I0320 16:24:51.702729 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:51 crc kubenswrapper[4813]: I0320 16:24:51.702756 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:51 crc kubenswrapper[4813]: I0320 16:24:51.760355 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:52 crc kubenswrapper[4813]: I0320 16:24:52.339260 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:52 crc kubenswrapper[4813]: I0320 16:24:52.935622 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:53 crc kubenswrapper[4813]: I0320 16:24:53.654024 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:24:53 crc kubenswrapper[4813]: I0320 16:24:53.980749 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:24:54 crc kubenswrapper[4813]: I0320 16:24:54.138768 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:54 crc kubenswrapper[4813]: I0320 16:24:54.713887 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:54 crc kubenswrapper[4813]: I0320 16:24:54.745510 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.317967 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.347411 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.351611 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:55 crc 
kubenswrapper[4813]: I0320 16:24:55.377216 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xg78b"] Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.377585 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xg78b" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="registry-server" containerID="cri-o://820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581" gracePeriod=2 Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.876520 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.967587 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-utilities\") pod \"787d2c28-25c5-44a4-9b44-c977489a30e9\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.967690 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr89m\" (UniqueName: \"kubernetes.io/projected/787d2c28-25c5-44a4-9b44-c977489a30e9-kube-api-access-cr89m\") pod \"787d2c28-25c5-44a4-9b44-c977489a30e9\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.967849 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-catalog-content\") pod \"787d2c28-25c5-44a4-9b44-c977489a30e9\" (UID: \"787d2c28-25c5-44a4-9b44-c977489a30e9\") " Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.968566 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-utilities" (OuterVolumeSpecName: "utilities") pod "787d2c28-25c5-44a4-9b44-c977489a30e9" (UID: "787d2c28-25c5-44a4-9b44-c977489a30e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.972837 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/787d2c28-25c5-44a4-9b44-c977489a30e9-kube-api-access-cr89m" (OuterVolumeSpecName: "kube-api-access-cr89m") pod "787d2c28-25c5-44a4-9b44-c977489a30e9" (UID: "787d2c28-25c5-44a4-9b44-c977489a30e9"). InnerVolumeSpecName "kube-api-access-cr89m". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:24:55 crc kubenswrapper[4813]: I0320 16:24:55.996178 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "787d2c28-25c5-44a4-9b44-c977489a30e9" (UID: "787d2c28-25c5-44a4-9b44-c977489a30e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.071602 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.071637 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/787d2c28-25c5-44a4-9b44-c977489a30e9-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.071646 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr89m\" (UniqueName: \"kubernetes.io/projected/787d2c28-25c5-44a4-9b44-c977489a30e9-kube-api-access-cr89m\") on node \"crc\" DevicePath \"\"" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.327706 4813 generic.go:334] "Generic (PLEG): container finished" podID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerID="820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581" exitCode=0 Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.328419 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xg78b" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.329661 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerDied","Data":"820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581"} Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.329730 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xg78b" event={"ID":"787d2c28-25c5-44a4-9b44-c977489a30e9","Type":"ContainerDied","Data":"c9255e451143c5be74a2715b73b6c6bd2e48af2b46615836978b3727f90675b2"} Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.329753 4813 scope.go:117] "RemoveContainer" containerID="820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.370310 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xg78b"] Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.379972 4813 scope.go:117] "RemoveContainer" containerID="24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.386788 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xg78b"] Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.404183 4813 scope.go:117] "RemoveContainer" containerID="56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.453727 4813 scope.go:117] "RemoveContainer" containerID="820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581" Mar 20 16:24:56 crc kubenswrapper[4813]: E0320 16:24:56.454465 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581\": container with ID starting with 820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581 not found: ID does not exist" containerID="820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.454512 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581"} err="failed to get container status \"820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581\": rpc error: code = NotFound desc = could not find container \"820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581\": container with ID starting with 820550fb3d9e252b3b020791597694da1a9146e47ce09ace09f8f5495915a581 not found: ID does not exist" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.454553 4813 scope.go:117] "RemoveContainer" containerID="24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610" Mar 20 16:24:56 crc kubenswrapper[4813]: E0320 16:24:56.455434 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610\": container with ID starting with 24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610 not found: ID does not exist" containerID="24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.455530 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610"} err="failed to get container status \"24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610\": rpc error: code = NotFound desc = could not find container \"24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610\": container with ID starting with 24d783eb526ca63d1330c08be69683fb3899ec066b3d82ad5fd6d98419b27610 not found: ID does not exist" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.455589 4813 scope.go:117] "RemoveContainer" containerID="56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d" Mar 20 16:24:56 crc kubenswrapper[4813]: E0320 16:24:56.456108 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d\": container with ID starting with 56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d not found: ID does not exist" containerID="56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.456135 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d"} err="failed to get container status \"56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d\": rpc error: code = NotFound desc = could not find container \"56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d\": container with ID starting with 56a09dae9d20475257d56dbaa87289ee9ebd41e277fcf08f451055a074daef0d not found: ID does not exist" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.629722 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.860121 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 
16:24:56.919118 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-db-sync-h2pgr"] Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.929524 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-db-sync-h2pgr"] Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.986296 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.986651 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-backup-0" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="cinder-backup" containerID="cri-o://bd3aba632b9742afa3b3308b1c657aa27a433d6e7d353bda7b422176bf2ba4f9" gracePeriod=30 Mar 20 16:24:56 crc kubenswrapper[4813]: I0320 16:24:56.987183 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-backup-0" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="probe" containerID="cri-o://bafa35caf22b8c1033ee0871281435a9e8f3b43c17fa3eb7cf5ef68011dfd6b4" gracePeriod=30 Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.003552 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.004112 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-scheduler-0" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="cinder-scheduler" containerID="cri-o://8f62f23ab274838c98c5e319855966d412a6d36a4de29558a83c5f25df86f610" gracePeriod=30 Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.004505 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-scheduler-0" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="probe" containerID="cri-o://4375ebef066e2c8c3e2c2e674c35ac5a2ef51e6847f83ad11da451d6408355e3" gracePeriod=30 Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.026698 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.026924 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-api-0" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api-log" containerID="cri-o://24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78" gracePeriod=30 Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.027337 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/cinder-api-0" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api" containerID="cri-o://bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632" gracePeriod=30 Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.051246 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/cinderff97-account-delete-7gp5x"] Mar 20 16:24:57 crc kubenswrapper[4813]: E0320 16:24:57.052127 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="extract-content" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.052208 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="extract-content" Mar 20 16:24:57 crc kubenswrapper[4813]: E0320 
16:24:57.052282 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="extract-utilities" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.052341 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="extract-utilities" Mar 20 16:24:57 crc kubenswrapper[4813]: E0320 16:24:57.052409 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="registry-server" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.052466 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="registry-server" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.054012 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" containerName="registry-server" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.054670 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.056849 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinderff97-account-delete-7gp5x"] Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.196573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1af0ea82-c3f9-484c-9cae-eab939d0b25a-operator-scripts\") pod \"cinderff97-account-delete-7gp5x\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.196810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj4sx\" (UniqueName: \"kubernetes.io/projected/1af0ea82-c3f9-484c-9cae-eab939d0b25a-kube-api-access-cj4sx\") pod \"cinderff97-account-delete-7gp5x\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.274919 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="787d2c28-25c5-44a4-9b44-c977489a30e9" path="/var/lib/kubelet/pods/787d2c28-25c5-44a4-9b44-c977489a30e9/volumes" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.275898 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f13fe6e6-3a88-4d63-9726-d851cad85ecf" path="/var/lib/kubelet/pods/f13fe6e6-3a88-4d63-9726-d851cad85ecf/volumes" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.298754 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj4sx\" (UniqueName: \"kubernetes.io/projected/1af0ea82-c3f9-484c-9cae-eab939d0b25a-kube-api-access-cj4sx\") pod \"cinderff97-account-delete-7gp5x\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.298800 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1af0ea82-c3f9-484c-9cae-eab939d0b25a-operator-scripts\") pod \"cinderff97-account-delete-7gp5x\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc 
kubenswrapper[4813]: I0320 16:24:57.299645 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1af0ea82-c3f9-484c-9cae-eab939d0b25a-operator-scripts\") pod \"cinderff97-account-delete-7gp5x\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.320059 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj4sx\" (UniqueName: \"kubernetes.io/projected/1af0ea82-c3f9-484c-9cae-eab939d0b25a-kube-api-access-cj4sx\") pod \"cinderff97-account-delete-7gp5x\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.343075 4813 generic.go:334] "Generic (PLEG): container finished" podID="597daa3e-2492-4dce-87ca-58e26abac6da" containerID="24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78" exitCode=143 Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.343172 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"597daa3e-2492-4dce-87ca-58e26abac6da","Type":"ContainerDied","Data":"24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78"} Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.383251 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:24:57 crc kubenswrapper[4813]: I0320 16:24:57.852921 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/cinderff97-account-delete-7gp5x"] Mar 20 16:24:57 crc kubenswrapper[4813]: W0320 16:24:57.863560 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1af0ea82_c3f9_484c_9cae_eab939d0b25a.slice/crio-69c9ce028e40c7570dd2745d31717dae52d669788dfe654e9c07096d235b1e10 WatchSource:0}: Error finding container 69c9ce028e40c7570dd2745d31717dae52d669788dfe654e9c07096d235b1e10: Status 404 returned error can't find the container with id 69c9ce028e40c7570dd2745d31717dae52d669788dfe654e9c07096d235b1e10 Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.013232 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.352313 4813 generic.go:334] "Generic (PLEG): container finished" podID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerID="4375ebef066e2c8c3e2c2e674c35ac5a2ef51e6847f83ad11da451d6408355e3" exitCode=0 Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.352386 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"ffeb7950-784d-45db-8b4e-2e0ae2e71271","Type":"ContainerDied","Data":"4375ebef066e2c8c3e2c2e674c35ac5a2ef51e6847f83ad11da451d6408355e3"} Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.354981 4813 generic.go:334] "Generic (PLEG): container finished" podID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerID="bafa35caf22b8c1033ee0871281435a9e8f3b43c17fa3eb7cf5ef68011dfd6b4" exitCode=0 Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.355041 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" 
event={"ID":"9e791a9d-77b9-4d26-9260-62e6d9ce98a4","Type":"ContainerDied","Data":"bafa35caf22b8c1033ee0871281435a9e8f3b43c17fa3eb7cf5ef68011dfd6b4"} Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.356430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" event={"ID":"1af0ea82-c3f9-484c-9cae-eab939d0b25a","Type":"ContainerStarted","Data":"736aa627dc10915297739f0789cb78486e4aabe6f68f487c06818c92b330b2ba"} Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.356462 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" event={"ID":"1af0ea82-c3f9-484c-9cae-eab939d0b25a","Type":"ContainerStarted","Data":"69c9ce028e40c7570dd2745d31717dae52d669788dfe654e9c07096d235b1e10"} Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.372395 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" podStartSLOduration=1.372376219 podStartE2EDuration="1.372376219s" podCreationTimestamp="2026-03-20 16:24:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:24:58.366951843 +0000 UTC m=+2827.789654684" watchObservedRunningTime="2026-03-20 16:24:58.372376219 +0000 UTC m=+2827.795079060" Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.792631 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:24:58 crc kubenswrapper[4813]: I0320 16:24:58.792846 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="954ddeb4-a252-4db6-93c8-7d7155e06e1b" containerName="watcher-decision-engine" containerID="cri-o://880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f" gracePeriod=30 Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.281919 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.365213 4813 generic.go:334] "Generic (PLEG): container finished" podID="1af0ea82-c3f9-484c-9cae-eab939d0b25a" containerID="736aa627dc10915297739f0789cb78486e4aabe6f68f487c06818c92b330b2ba" exitCode=0 Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.365393 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" event={"ID":"1af0ea82-c3f9-484c-9cae-eab939d0b25a","Type":"ContainerDied","Data":"736aa627dc10915297739f0789cb78486e4aabe6f68f487c06818c92b330b2ba"} Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.565335 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.565639 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-central-agent" containerID="cri-o://40dee85b2b152162fb750d649c9eece5b5b6a852f2f04d9dcbc4d965f5914548" gracePeriod=30 Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.565743 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" 
containerName="proxy-httpd" containerID="cri-o://3f2ed583b0f209368d294c88f746f04cdfd9b951b9cfe211bbffe99978cbfa8d" gracePeriod=30 Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.565775 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="sg-core" containerID="cri-o://eb2f3499d5713e3013e2ce1fd2dba305e1edfc34b4689c8c5854d53b04d0b9db" gracePeriod=30 Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.565811 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-notification-agent" containerID="cri-o://bcb9356ba88adf134d5a8943252d6b298172f6ebad76e905a9061bf8f3ad3893" gracePeriod=30 Mar 20 16:24:59 crc kubenswrapper[4813]: I0320 16:24:59.585691 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.376655 4813 generic.go:334] "Generic (PLEG): container finished" podID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerID="3f2ed583b0f209368d294c88f746f04cdfd9b951b9cfe211bbffe99978cbfa8d" exitCode=0 Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377009 4813 generic.go:334] "Generic (PLEG): container finished" podID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerID="eb2f3499d5713e3013e2ce1fd2dba305e1edfc34b4689c8c5854d53b04d0b9db" exitCode=2 Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377023 4813 generic.go:334] "Generic (PLEG): container finished" podID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerID="bcb9356ba88adf134d5a8943252d6b298172f6ebad76e905a9061bf8f3ad3893" exitCode=0 Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377033 4813 generic.go:334] "Generic (PLEG): container finished" podID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerID="40dee85b2b152162fb750d649c9eece5b5b6a852f2f04d9dcbc4d965f5914548" exitCode=0 Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.376728 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerDied","Data":"3f2ed583b0f209368d294c88f746f04cdfd9b951b9cfe211bbffe99978cbfa8d"} Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377112 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerDied","Data":"eb2f3499d5713e3013e2ce1fd2dba305e1edfc34b4689c8c5854d53b04d0b9db"} Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377127 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerDied","Data":"bcb9356ba88adf134d5a8943252d6b298172f6ebad76e905a9061bf8f3ad3893"} Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377140 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerDied","Data":"40dee85b2b152162fb750d649c9eece5b5b6a852f2f04d9dcbc4d965f5914548"} Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377182 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6","Type":"ContainerDied","Data":"9493f871747725b2e9a1c2a63fffb432a2a51253c6f50125abf7d0712b5e2456"} Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.377195 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9493f871747725b2e9a1c2a63fffb432a2a51253c6f50125abf7d0712b5e2456" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.412593 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.454060 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.462780 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/cinder-api-0" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.249:8776/healthcheck\": read tcp 10.217.0.2:50870->10.217.0.249:8776: read: connection reset by peer" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.554924 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-run-httpd\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.554989 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-sg-core-conf-yaml\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.555020 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-log-httpd\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.555058 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-scripts\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.555079 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-config-data\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.555144 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpkzn\" (UniqueName: \"kubernetes.io/projected/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-kube-api-access-rpkzn\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.555254 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-combined-ca-bundle\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.555276 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-ceilometer-tls-certs\") pod \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\" (UID: \"8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.556415 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.559694 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.562851 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-kube-api-access-rpkzn" (OuterVolumeSpecName: "kube-api-access-rpkzn") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "kube-api-access-rpkzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.563848 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-scripts" (OuterVolumeSpecName: "scripts") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.604293 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.629788 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.678086 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.678349 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.678471 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.679105 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.679121 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.679150 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpkzn\" (UniqueName: \"kubernetes.io/projected/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-kube-api-access-rpkzn\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.692516 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.695710 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-config-data" (OuterVolumeSpecName: "config-data") pod "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" (UID: "8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.771272 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.780089 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.780108 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.880625 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cj4sx\" (UniqueName: \"kubernetes.io/projected/1af0ea82-c3f9-484c-9cae-eab939d0b25a-kube-api-access-cj4sx\") pod \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.880743 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1af0ea82-c3f9-484c-9cae-eab939d0b25a-operator-scripts\") pod \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\" (UID: \"1af0ea82-c3f9-484c-9cae-eab939d0b25a\") " Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.882022 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1af0ea82-c3f9-484c-9cae-eab939d0b25a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1af0ea82-c3f9-484c-9cae-eab939d0b25a" (UID: "1af0ea82-c3f9-484c-9cae-eab939d0b25a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.884780 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1af0ea82-c3f9-484c-9cae-eab939d0b25a-kube-api-access-cj4sx" (OuterVolumeSpecName: "kube-api-access-cj4sx") pod "1af0ea82-c3f9-484c-9cae-eab939d0b25a" (UID: "1af0ea82-c3f9-484c-9cae-eab939d0b25a"). InnerVolumeSpecName "kube-api-access-cj4sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.982540 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1af0ea82-c3f9-484c-9cae-eab939d0b25a-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:00 crc kubenswrapper[4813]: I0320 16:25:00.982897 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cj4sx\" (UniqueName: \"kubernetes.io/projected/1af0ea82-c3f9-484c-9cae-eab939d0b25a-kube-api-access-cj4sx\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.026357 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185461 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-scripts\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185519 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/597daa3e-2492-4dce-87ca-58e26abac6da-etc-machine-id\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185549 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-public-tls-certs\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185614 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597daa3e-2492-4dce-87ca-58e26abac6da-logs\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185637 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data-custom\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185659 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-combined-ca-bundle\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185680 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-cert-memcached-mtls\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185712 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-internal-tls-certs\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185759 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvlgs\" (UniqueName: \"kubernetes.io/projected/597daa3e-2492-4dce-87ca-58e26abac6da-kube-api-access-vvlgs\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: \"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.185775 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data\") pod \"597daa3e-2492-4dce-87ca-58e26abac6da\" (UID: 
\"597daa3e-2492-4dce-87ca-58e26abac6da\") " Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.186947 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/597daa3e-2492-4dce-87ca-58e26abac6da-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.187571 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/597daa3e-2492-4dce-87ca-58e26abac6da-logs" (OuterVolumeSpecName: "logs") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.192834 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-scripts" (OuterVolumeSpecName: "scripts") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.193154 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/597daa3e-2492-4dce-87ca-58e26abac6da-kube-api-access-vvlgs" (OuterVolumeSpecName: "kube-api-access-vvlgs") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "kube-api-access-vvlgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.194436 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.214360 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.229556 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.231687 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data" (OuterVolumeSpecName: "config-data") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.236641 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.256380 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "597daa3e-2492-4dce-87ca-58e26abac6da" (UID: "597daa3e-2492-4dce-87ca-58e26abac6da"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287174 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvlgs\" (UniqueName: \"kubernetes.io/projected/597daa3e-2492-4dce-87ca-58e26abac6da-kube-api-access-vvlgs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287200 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287209 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287217 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/597daa3e-2492-4dce-87ca-58e26abac6da-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287225 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287233 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597daa3e-2492-4dce-87ca-58e26abac6da-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287241 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287249 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287257 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.287266 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/597daa3e-2492-4dce-87ca-58e26abac6da-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.401679 4813 generic.go:334] "Generic (PLEG): container finished" podID="597daa3e-2492-4dce-87ca-58e26abac6da" containerID="bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632" exitCode=0 Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.401735 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"597daa3e-2492-4dce-87ca-58e26abac6da","Type":"ContainerDied","Data":"bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632"} Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.401761 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-api-0" event={"ID":"597daa3e-2492-4dce-87ca-58e26abac6da","Type":"ContainerDied","Data":"5595f1f38a4a612286378f88e478201462c904ede7df01f2967f503c693a002f"} Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.401776 4813 scope.go:117] "RemoveContainer" containerID="bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.401877 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-api-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.404659 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.409302 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.409830 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinderff97-account-delete-7gp5x" event={"ID":"1af0ea82-c3f9-484c-9cae-eab939d0b25a","Type":"ContainerDied","Data":"69c9ce028e40c7570dd2745d31717dae52d669788dfe654e9c07096d235b1e10"} Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.409869 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69c9ce028e40c7570dd2745d31717dae52d669788dfe654e9c07096d235b1e10" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.449506 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.453389 4813 scope.go:117] "RemoveContainer" containerID="24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.461605 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-api-0"] Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.468113 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.480555 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.482748 4813 scope.go:117] "RemoveContainer" containerID="bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.483228 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632\": container 
with ID starting with bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632 not found: ID does not exist" containerID="bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.483259 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632"} err="failed to get container status \"bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632\": rpc error: code = NotFound desc = could not find container \"bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632\": container with ID starting with bc97a5d45ff3631bc863b61f0525ad11a482e240126ac9c3cda9e365f541d632 not found: ID does not exist" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.483284 4813 scope.go:117] "RemoveContainer" containerID="24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.484646 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78\": container with ID starting with 24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78 not found: ID does not exist" containerID="24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.484676 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78"} err="failed to get container status \"24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78\": rpc error: code = NotFound desc = could not find container \"24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78\": container with ID starting with 24892bf7ba12bf5e4b48f947bf364040ae0ac18009b40722cef0468079ef2c78 not found: ID does not exist" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.492624 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.492978 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1af0ea82-c3f9-484c-9cae-eab939d0b25a" containerName="mariadb-account-delete" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.492992 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1af0ea82-c3f9-484c-9cae-eab939d0b25a" containerName="mariadb-account-delete" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.493006 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-notification-agent" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493012 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-notification-agent" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.493029 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api-log" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493035 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api-log" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.493045 4813 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="proxy-httpd" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493050 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="proxy-httpd" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.493059 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493065 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.493072 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-central-agent" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493078 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-central-agent" Mar 20 16:25:01 crc kubenswrapper[4813]: E0320 16:25:01.493089 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="sg-core" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493094 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="sg-core" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493230 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="sg-core" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493244 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api-log" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493252 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-notification-agent" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493262 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" containerName="cinder-api" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493274 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="ceilometer-central-agent" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493283 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1af0ea82-c3f9-484c-9cae-eab939d0b25a" containerName="mariadb-account-delete" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.493292 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" containerName="proxy-httpd" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.494752 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.497928 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.498643 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.500189 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.501833 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.590953 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591276 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591298 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-config-data\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591323 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-log-httpd\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591341 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlwsh\" (UniqueName: \"kubernetes.io/projected/56100e5f-07eb-4a41-93aa-1958dde77551-kube-api-access-hlwsh\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591552 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591827 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-run-httpd\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.591957 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-scripts\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.636277 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693751 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-scripts\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693824 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693851 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693866 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-config-data\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693893 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-log-httpd\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlwsh\" (UniqueName: \"kubernetes.io/projected/56100e5f-07eb-4a41-93aa-1958dde77551-kube-api-access-hlwsh\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.693937 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.694000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-run-httpd\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.694411 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-run-httpd\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.696055 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-log-httpd\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.700442 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.700698 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-config-data\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.700934 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-scripts\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.705038 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.707724 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.714425 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlwsh\" (UniqueName: \"kubernetes.io/projected/56100e5f-07eb-4a41-93aa-1958dde77551-kube-api-access-hlwsh\") pod \"ceilometer-0\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:01 crc kubenswrapper[4813]: I0320 16:25:01.824181 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.072620 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-db-create-pmsn7"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.085259 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-db-create-pmsn7"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.094267 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-ff97-account-create-update-6wll2"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.105362 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinderff97-account-delete-7gp5x"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.114337 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-ff97-account-create-update-6wll2"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.120598 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinderff97-account-delete-7gp5x"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.324854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.417748 4813 generic.go:334] "Generic (PLEG): container finished" podID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerID="8f62f23ab274838c98c5e319855966d412a6d36a4de29558a83c5f25df86f610" exitCode=0 Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.417840 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"ffeb7950-784d-45db-8b4e-2e0ae2e71271","Type":"ContainerDied","Data":"8f62f23ab274838c98c5e319855966d412a6d36a4de29558a83c5f25df86f610"} Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.419561 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerStarted","Data":"6c526145af8e56cd03915d3c9bc5a5f5dc0a4e4f822400e7202754b1f035dd19"} Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.423921 4813 generic.go:334] "Generic (PLEG): container finished" podID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerID="bd3aba632b9742afa3b3308b1c657aa27a433d6e7d353bda7b422176bf2ba4f9" exitCode=0 Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.423949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"9e791a9d-77b9-4d26-9260-62e6d9ce98a4","Type":"ContainerDied","Data":"bd3aba632b9742afa3b3308b1c657aa27a433d6e7d353bda7b422176bf2ba4f9"} Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.501351 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623670 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-lib-modules\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623705 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-brick\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623734 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-scripts\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623778 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623795 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "var-locks-brick". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623811 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-cinder\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623887 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rdxc\" (UniqueName: \"kubernetes.io/projected/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-kube-api-access-9rdxc\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623908 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-sys\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623932 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-run\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623889 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-cinder" (OuterVolumeSpecName: "var-locks-cinder") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "var-locks-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.623965 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-machine-id\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624007 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-sys" (OuterVolumeSpecName: "sys") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624035 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-run" (OuterVolumeSpecName: "run") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624051 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624068 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-cert-memcached-mtls\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624090 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-lib-cinder\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624107 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data-custom\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624152 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-iscsi\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624174 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-combined-ca-bundle\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624205 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-nvme\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624219 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-dev\") pod \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\" (UID: \"9e791a9d-77b9-4d26-9260-62e6d9ce98a4\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624816 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624839 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-lib-cinder" (OuterVolumeSpecName: "var-lib-cinder") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "var-lib-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624850 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624863 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624916 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-dev" (OuterVolumeSpecName: "dev") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624957 4813 reconciler_common.go:293] "Volume detached for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-cinder\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624968 4813 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-sys\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624976 4813 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-run\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624987 4813 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-lib-modules\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.624995 4813 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-locks-brick\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.629166 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.629950 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-kube-api-access-9rdxc" (OuterVolumeSpecName: "kube-api-access-9rdxc") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "kube-api-access-9rdxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.630117 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-scripts" (OuterVolumeSpecName: "scripts") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.678851 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.734975 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data" (OuterVolumeSpecName: "config-data") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735759 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735789 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rdxc\" (UniqueName: \"kubernetes.io/projected/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-kube-api-access-9rdxc\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735799 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735808 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735817 4813 reconciler_common.go:293] "Volume detached for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-var-lib-cinder\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735828 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735859 4813 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" 
(UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-iscsi\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735868 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735876 4813 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-etc-nvme\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.735884 4813 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-dev\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.811455 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.817348 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "9e791a9d-77b9-4d26-9260-62e6d9ce98a4" (UID: "9e791a9d-77b9-4d26-9260-62e6d9ce98a4"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.836774 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/9e791a9d-77b9-4d26-9260-62e6d9ce98a4-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.898606 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939237 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb7950-784d-45db-8b4e-2e0ae2e71271-etc-machine-id\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939287 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-combined-ca-bundle\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939361 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ffeb7950-784d-45db-8b4e-2e0ae2e71271-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939411 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-scripts\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939442 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data-custom\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939512 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939591 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmqjl\" (UniqueName: \"kubernetes.io/projected/ffeb7950-784d-45db-8b4e-2e0ae2e71271-kube-api-access-cmqjl\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.939636 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-cert-memcached-mtls\") pod \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\" (UID: \"ffeb7950-784d-45db-8b4e-2e0ae2e71271\") " Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.940061 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb7950-784d-45db-8b4e-2e0ae2e71271-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.952670 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.956360 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-scripts" (OuterVolumeSpecName: "scripts") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:02 crc kubenswrapper[4813]: I0320 16:25:02.960892 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffeb7950-784d-45db-8b4e-2e0ae2e71271-kube-api-access-cmqjl" (OuterVolumeSpecName: "kube-api-access-cmqjl") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "kube-api-access-cmqjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.011376 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.046676 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmqjl\" (UniqueName: \"kubernetes.io/projected/ffeb7950-784d-45db-8b4e-2e0ae2e71271-kube-api-access-cmqjl\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.046726 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.046738 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.046750 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.118812 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.122254 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data" (OuterVolumeSpecName: "config-data") pod "ffeb7950-784d-45db-8b4e-2e0ae2e71271" (UID: "ffeb7950-784d-45db-8b4e-2e0ae2e71271"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.148853 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.148903 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb7950-784d-45db-8b4e-2e0ae2e71271-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.276440 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1af0ea82-c3f9-484c-9cae-eab939d0b25a" path="/var/lib/kubelet/pods/1af0ea82-c3f9-484c-9cae-eab939d0b25a/volumes" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.277333 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb55726-1843-49ef-904e-dfb5d7ea3d00" path="/var/lib/kubelet/pods/4bb55726-1843-49ef-904e-dfb5d7ea3d00/volumes" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.277975 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="597daa3e-2492-4dce-87ca-58e26abac6da" path="/var/lib/kubelet/pods/597daa3e-2492-4dce-87ca-58e26abac6da/volumes" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.279107 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63b748b2-8139-45e5-b2b5-42b9eacaff48" path="/var/lib/kubelet/pods/63b748b2-8139-45e5-b2b5-42b9eacaff48/volumes" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.279760 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6" path="/var/lib/kubelet/pods/8e3b5eaf-e2a8-43aa-a452-f2f0cbe2a8a6/volumes" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.433686 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-scheduler-0" event={"ID":"ffeb7950-784d-45db-8b4e-2e0ae2e71271","Type":"ContainerDied","Data":"d70ab795a16bdc9607f109d7c2545a43e437f4a0ffaae184b7d3b38dc90618b5"} Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.433744 4813 scope.go:117] "RemoveContainer" containerID="4375ebef066e2c8c3e2c2e674c35ac5a2ef51e6847f83ad11da451d6408355e3" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.433857 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/cinder-scheduler-0" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.439876 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerStarted","Data":"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2"} Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.442150 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/cinder-backup-0" event={"ID":"9e791a9d-77b9-4d26-9260-62e6d9ce98a4","Type":"ContainerDied","Data":"8ba105902c2cff36fb3eb69bb8359597196025e0ca406e8ae9a09e74bb6c4131"} Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.442251 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/cinder-backup-0" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.457306 4813 scope.go:117] "RemoveContainer" containerID="8f62f23ab274838c98c5e319855966d412a6d36a4de29558a83c5f25df86f610" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.462824 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.471417 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-scheduler-0"] Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.486820 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.487955 4813 scope.go:117] "RemoveContainer" containerID="bafa35caf22b8c1033ee0871281435a9e8f3b43c17fa3eb7cf5ef68011dfd6b4" Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.497603 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/cinder-backup-0"] Mar 20 16:25:03 crc kubenswrapper[4813]: I0320 16:25:03.506649 4813 scope.go:117] "RemoveContainer" containerID="bd3aba632b9742afa3b3308b1c657aa27a433d6e7d353bda7b422176bf2ba4f9" Mar 20 16:25:04 crc kubenswrapper[4813]: I0320 16:25:04.013178 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:04 crc kubenswrapper[4813]: I0320 16:25:04.453587 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerStarted","Data":"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50"} Mar 20 16:25:05 crc kubenswrapper[4813]: I0320 16:25:05.224045 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:05 crc kubenswrapper[4813]: I0320 16:25:05.274005 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" path="/var/lib/kubelet/pods/9e791a9d-77b9-4d26-9260-62e6d9ce98a4/volumes" Mar 20 16:25:05 crc kubenswrapper[4813]: I0320 16:25:05.274687 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" path="/var/lib/kubelet/pods/ffeb7950-784d-45db-8b4e-2e0ae2e71271/volumes" Mar 20 16:25:05 crc kubenswrapper[4813]: I0320 16:25:05.467320 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerStarted","Data":"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2"} Mar 20 16:25:06 crc kubenswrapper[4813]: I0320 16:25:06.401845 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:07 crc kubenswrapper[4813]: I0320 16:25:07.484557 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerStarted","Data":"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056"} Mar 20 16:25:07 crc kubenswrapper[4813]: I0320 16:25:07.485906 4813 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:07 crc kubenswrapper[4813]: I0320 16:25:07.511062 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.042294415 podStartE2EDuration="6.511039382s" podCreationTimestamp="2026-03-20 16:25:01 +0000 UTC" firstStartedPulling="2026-03-20 16:25:02.365184799 +0000 UTC m=+2831.787887640" lastFinishedPulling="2026-03-20 16:25:06.833929766 +0000 UTC m=+2836.256632607" observedRunningTime="2026-03-20 16:25:07.503440367 +0000 UTC m=+2836.926143258" watchObservedRunningTime="2026-03-20 16:25:07.511039382 +0000 UTC m=+2836.933742223" Mar 20 16:25:07 crc kubenswrapper[4813]: I0320 16:25:07.578940 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_954ddeb4-a252-4db6-93c8-7d7155e06e1b/watcher-decision-engine/0.log" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.200639 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.247651 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954ddeb4-a252-4db6-93c8-7d7155e06e1b-logs\") pod \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248022 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-custom-prometheus-ca\") pod \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248081 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-config-data\") pod \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248108 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-cert-memcached-mtls\") pod \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248146 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-combined-ca-bundle\") pod \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248193 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnksz\" (UniqueName: \"kubernetes.io/projected/954ddeb4-a252-4db6-93c8-7d7155e06e1b-kube-api-access-xnksz\") pod \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\" (UID: \"954ddeb4-a252-4db6-93c8-7d7155e06e1b\") " Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248196 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/954ddeb4-a252-4db6-93c8-7d7155e06e1b-logs" (OuterVolumeSpecName: "logs") pod 
"954ddeb4-a252-4db6-93c8-7d7155e06e1b" (UID: "954ddeb4-a252-4db6-93c8-7d7155e06e1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.248628 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954ddeb4-a252-4db6-93c8-7d7155e06e1b-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.264880 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/954ddeb4-a252-4db6-93c8-7d7155e06e1b-kube-api-access-xnksz" (OuterVolumeSpecName: "kube-api-access-xnksz") pod "954ddeb4-a252-4db6-93c8-7d7155e06e1b" (UID: "954ddeb4-a252-4db6-93c8-7d7155e06e1b"). InnerVolumeSpecName "kube-api-access-xnksz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.277214 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "954ddeb4-a252-4db6-93c8-7d7155e06e1b" (UID: "954ddeb4-a252-4db6-93c8-7d7155e06e1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.324589 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-config-data" (OuterVolumeSpecName: "config-data") pod "954ddeb4-a252-4db6-93c8-7d7155e06e1b" (UID: "954ddeb4-a252-4db6-93c8-7d7155e06e1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.331452 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "954ddeb4-a252-4db6-93c8-7d7155e06e1b" (UID: "954ddeb4-a252-4db6-93c8-7d7155e06e1b"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.344434 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "954ddeb4-a252-4db6-93c8-7d7155e06e1b" (UID: "954ddeb4-a252-4db6-93c8-7d7155e06e1b"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.350811 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.350835 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.350844 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.350855 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954ddeb4-a252-4db6-93c8-7d7155e06e1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.350863 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnksz\" (UniqueName: \"kubernetes.io/projected/954ddeb4-a252-4db6-93c8-7d7155e06e1b-kube-api-access-xnksz\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.494975 4813 generic.go:334] "Generic (PLEG): container finished" podID="954ddeb4-a252-4db6-93c8-7d7155e06e1b" containerID="880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f" exitCode=0 Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.495593 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.499613 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"954ddeb4-a252-4db6-93c8-7d7155e06e1b","Type":"ContainerDied","Data":"880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f"} Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.499658 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"954ddeb4-a252-4db6-93c8-7d7155e06e1b","Type":"ContainerDied","Data":"7b9e83d49d97f9d910bce6a92685a1a3fb566920c116ccbac79e0b9c78e5fbf9"} Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.499679 4813 scope.go:117] "RemoveContainer" containerID="880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.530072 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.554433 4813 scope.go:117] "RemoveContainer" containerID="880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.557978 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:08 crc kubenswrapper[4813]: E0320 16:25:08.558136 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f\": container with ID starting with 880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f not found: ID does not exist" containerID="880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.558178 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f"} err="failed to get container status \"880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f\": rpc error: code = NotFound desc = could not find container \"880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f\": container with ID starting with 880248f562f9f1f38bec163917ad81f307bb44f3ee7d6c867830a34f5574113f not found: ID does not exist" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580124 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:08 crc kubenswrapper[4813]: E0320 16:25:08.580583 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="cinder-backup" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580613 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="cinder-backup" Mar 20 16:25:08 crc kubenswrapper[4813]: E0320 16:25:08.580625 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="954ddeb4-a252-4db6-93c8-7d7155e06e1b" containerName="watcher-decision-engine" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580633 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="954ddeb4-a252-4db6-93c8-7d7155e06e1b" containerName="watcher-decision-engine" Mar 20 16:25:08 crc kubenswrapper[4813]: E0320 
16:25:08.580649 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="probe" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580657 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="probe" Mar 20 16:25:08 crc kubenswrapper[4813]: E0320 16:25:08.580675 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="probe" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580682 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="probe" Mar 20 16:25:08 crc kubenswrapper[4813]: E0320 16:25:08.580696 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="cinder-scheduler" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580703 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="cinder-scheduler" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580910 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="probe" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580926 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="probe" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580935 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffeb7950-784d-45db-8b4e-2e0ae2e71271" containerName="cinder-scheduler" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580949 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e791a9d-77b9-4d26-9260-62e6d9ce98a4" containerName="cinder-backup" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.580965 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="954ddeb4-a252-4db6-93c8-7d7155e06e1b" containerName="watcher-decision-engine" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.583225 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.586248 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.589414 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.762208 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.762267 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.762286 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.762305 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqkkf\" (UniqueName: \"kubernetes.io/projected/0768a899-8040-4a9d-bc42-107e17bf5e79-kube-api-access-mqkkf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.762388 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0768a899-8040-4a9d-bc42-107e17bf5e79-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.762474 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.863672 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.863726 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.863746 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqkkf\" (UniqueName: \"kubernetes.io/projected/0768a899-8040-4a9d-bc42-107e17bf5e79-kube-api-access-mqkkf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.863795 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0768a899-8040-4a9d-bc42-107e17bf5e79-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.863870 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.863916 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.864308 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0768a899-8040-4a9d-bc42-107e17bf5e79-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.866879 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.866997 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.868111 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc 
kubenswrapper[4813]: I0320 16:25:08.868592 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.879701 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqkkf\" (UniqueName: \"kubernetes.io/projected/0768a899-8040-4a9d-bc42-107e17bf5e79-kube-api-access-mqkkf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:08 crc kubenswrapper[4813]: I0320 16:25:08.912790 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:09 crc kubenswrapper[4813]: I0320 16:25:09.275126 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="954ddeb4-a252-4db6-93c8-7d7155e06e1b" path="/var/lib/kubelet/pods/954ddeb4-a252-4db6-93c8-7d7155e06e1b/volumes" Mar 20 16:25:09 crc kubenswrapper[4813]: I0320 16:25:09.371272 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:09 crc kubenswrapper[4813]: W0320 16:25:09.380397 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0768a899_8040_4a9d_bc42_107e17bf5e79.slice/crio-21d11cf7b3e0e54508032daf2538999c73fcdff9abb5c79c2e18b5c33137d3e4 WatchSource:0}: Error finding container 21d11cf7b3e0e54508032daf2538999c73fcdff9abb5c79c2e18b5c33137d3e4: Status 404 returned error can't find the container with id 21d11cf7b3e0e54508032daf2538999c73fcdff9abb5c79c2e18b5c33137d3e4 Mar 20 16:25:09 crc kubenswrapper[4813]: I0320 16:25:09.505305 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"0768a899-8040-4a9d-bc42-107e17bf5e79","Type":"ContainerStarted","Data":"21d11cf7b3e0e54508032daf2538999c73fcdff9abb5c79c2e18b5c33137d3e4"} Mar 20 16:25:10 crc kubenswrapper[4813]: I0320 16:25:10.518701 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"0768a899-8040-4a9d-bc42-107e17bf5e79","Type":"ContainerStarted","Data":"af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de"} Mar 20 16:25:11 crc kubenswrapper[4813]: I0320 16:25:11.107435 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:12 crc kubenswrapper[4813]: I0320 16:25:12.379991 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:13 crc kubenswrapper[4813]: I0320 16:25:13.540727 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:14 crc kubenswrapper[4813]: I0320 16:25:14.715385 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:15 crc kubenswrapper[4813]: I0320 16:25:15.896288 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:17 crc kubenswrapper[4813]: I0320 16:25:17.102139 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:18 crc kubenswrapper[4813]: I0320 16:25:18.324127 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:18 crc kubenswrapper[4813]: I0320 16:25:18.914040 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:18 crc kubenswrapper[4813]: I0320 16:25:18.959781 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:18 crc kubenswrapper[4813]: I0320 16:25:18.980350 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=10.980324945 podStartE2EDuration="10.980324945s" podCreationTimestamp="2026-03-20 16:25:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:25:10.544194213 +0000 UTC m=+2839.966897054" watchObservedRunningTime="2026-03-20 16:25:18.980324945 +0000 UTC m=+2848.403027786" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.324241 4813 scope.go:117] "RemoveContainer" containerID="e0e53fb9e0d5566adcb24edd21113bcce663c4b8d3aba2c217cf3599f94e0129" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.364326 4813 scope.go:117] "RemoveContainer" containerID="c81fa87e0e734631fdb2fb48fe57ab6dbe7cc49340583611d0c03839fa13feff" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.410877 4813 scope.go:117] "RemoveContainer" containerID="75a20fd563dd85f81f8f5205742ce1365ea95c339ca9d56a571aee10f13abd9a" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.440238 4813 scope.go:117] "RemoveContainer" containerID="622b484c30f222f6a9884bbd7c89e92281f9830334c462ae152c4f611fa8a555" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.472170 4813 scope.go:117] "RemoveContainer" containerID="1a791b265264b88e19f353e17cf480a49c1406bb02f4c6f167f0e1838ad1ac97" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.560035 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.671989 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:19 crc kubenswrapper[4813]: I0320 16:25:19.696875 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:20 crc kubenswrapper[4813]: I0320 16:25:20.830457 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_watcher-kuttl-decision-engine-0_0768a899-8040-4a9d-bc42-107e17bf5e79/watcher-decision-engine/0.log" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.006168 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-98vxl"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.018821 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-98vxl"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.109225 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher5a5a-account-delete-x7dp8"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.110689 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.122994 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher5a5a-account-delete-x7dp8"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.124888 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df3eeb2-792c-419e-bdee-4fb9ff863965-operator-scripts\") pod \"watcher5a5a-account-delete-x7dp8\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.124975 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpjlm\" (UniqueName: \"kubernetes.io/projected/8df3eeb2-792c-419e-bdee-4fb9ff863965-kube-api-access-mpjlm\") pod \"watcher5a5a-account-delete-x7dp8\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.183783 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.202320 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.202524 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" containerName="watcher-applier" containerID="cri-o://a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72" gracePeriod=30 Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.232976 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df3eeb2-792c-419e-bdee-4fb9ff863965-operator-scripts\") pod \"watcher5a5a-account-delete-x7dp8\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.233060 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpjlm\" (UniqueName: \"kubernetes.io/projected/8df3eeb2-792c-419e-bdee-4fb9ff863965-kube-api-access-mpjlm\") pod \"watcher5a5a-account-delete-x7dp8\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.234085 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df3eeb2-792c-419e-bdee-4fb9ff863965-operator-scripts\") pod \"watcher5a5a-account-delete-x7dp8\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.333578 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpjlm\" (UniqueName: \"kubernetes.io/projected/8df3eeb2-792c-419e-bdee-4fb9ff863965-kube-api-access-mpjlm\") pod \"watcher5a5a-account-delete-x7dp8\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.337529 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="236617cb-ca1c-4f76-bd23-e81457053874" path="/var/lib/kubelet/pods/236617cb-ca1c-4f76-bd23-e81457053874/volumes" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.338056 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.338222 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-kuttl-api-log" containerID="cri-o://c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150" gracePeriod=30 Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.338616 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-api" containerID="cri-o://69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576" gracePeriod=30 Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.426848 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:21 crc kubenswrapper[4813]: E0320 16:25:21.505116 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a0386d0_150b_4801_a553_9ed04dda83da.slice/crio-c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150.scope\": RecentStats: unable to find data in memory cache]" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.695843 4813 generic.go:334] "Generic (PLEG): container finished" podID="2a0386d0-150b-4801-a553-9ed04dda83da" containerID="c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150" exitCode=143 Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.696751 4813 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" secret="" err="secret \"watcher-watcher-kuttl-dockercfg-ws5vd\" not found" Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.696780 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"2a0386d0-150b-4801-a553-9ed04dda83da","Type":"ContainerDied","Data":"c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150"} Mar 20 16:25:21 crc kubenswrapper[4813]: E0320 16:25:21.751019 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:21 crc kubenswrapper[4813]: E0320 16:25:21.751105 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data podName:0768a899-8040-4a9d-bc42-107e17bf5e79 nodeName:}" failed. No retries permitted until 2026-03-20 16:25:22.251083709 +0000 UTC m=+2851.673786550 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:21 crc kubenswrapper[4813]: I0320 16:25:21.920373 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher5a5a-account-delete-x7dp8"] Mar 20 16:25:21 crc kubenswrapper[4813]: W0320 16:25:21.966832 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8df3eeb2_792c_419e_bdee_4fb9ff863965.slice/crio-783a2dc197e388200bd2cfa60f20bdf9c65b453df6c34931d916fd20600d7ddd WatchSource:0}: Error finding container 783a2dc197e388200bd2cfa60f20bdf9c65b453df6c34931d916fd20600d7ddd: Status 404 returned error can't find the container with id 783a2dc197e388200bd2cfa60f20bdf9c65b453df6c34931d916fd20600d7ddd Mar 20 16:25:22 crc kubenswrapper[4813]: E0320 16:25:22.259885 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:22 crc kubenswrapper[4813]: E0320 16:25:22.260295 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data podName:0768a899-8040-4a9d-bc42-107e17bf5e79 nodeName:}" failed. No retries permitted until 2026-03-20 16:25:23.260274325 +0000 UTC m=+2852.682977166 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.455573 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.463450 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4dx\" (UniqueName: \"kubernetes.io/projected/2a0386d0-150b-4801-a553-9ed04dda83da-kube-api-access-7c4dx\") pod \"2a0386d0-150b-4801-a553-9ed04dda83da\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.463502 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-combined-ca-bundle\") pod \"2a0386d0-150b-4801-a553-9ed04dda83da\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.463558 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-custom-prometheus-ca\") pod \"2a0386d0-150b-4801-a553-9ed04dda83da\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.463593 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-config-data\") pod \"2a0386d0-150b-4801-a553-9ed04dda83da\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.463638 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0386d0-150b-4801-a553-9ed04dda83da-logs\") pod \"2a0386d0-150b-4801-a553-9ed04dda83da\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.463729 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-cert-memcached-mtls\") pod \"2a0386d0-150b-4801-a553-9ed04dda83da\" (UID: \"2a0386d0-150b-4801-a553-9ed04dda83da\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.464164 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a0386d0-150b-4801-a553-9ed04dda83da-logs" (OuterVolumeSpecName: "logs") pod "2a0386d0-150b-4801-a553-9ed04dda83da" (UID: "2a0386d0-150b-4801-a553-9ed04dda83da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.464417 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0386d0-150b-4801-a553-9ed04dda83da-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.470062 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a0386d0-150b-4801-a553-9ed04dda83da-kube-api-access-7c4dx" (OuterVolumeSpecName: "kube-api-access-7c4dx") pod "2a0386d0-150b-4801-a553-9ed04dda83da" (UID: "2a0386d0-150b-4801-a553-9ed04dda83da"). InnerVolumeSpecName "kube-api-access-7c4dx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.495630 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a0386d0-150b-4801-a553-9ed04dda83da" (UID: "2a0386d0-150b-4801-a553-9ed04dda83da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.525724 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "2a0386d0-150b-4801-a553-9ed04dda83da" (UID: "2a0386d0-150b-4801-a553-9ed04dda83da"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.551453 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "2a0386d0-150b-4801-a553-9ed04dda83da" (UID: "2a0386d0-150b-4801-a553-9ed04dda83da"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.551884 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.552371 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-config-data" (OuterVolumeSpecName: "config-data") pod "2a0386d0-150b-4801-a553-9ed04dda83da" (UID: "2a0386d0-150b-4801-a553-9ed04dda83da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565032 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqs8n\" (UniqueName: \"kubernetes.io/projected/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-kube-api-access-jqs8n\") pod \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565165 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-cert-memcached-mtls\") pod \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565200 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-combined-ca-bundle\") pod \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565285 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-logs\") pod \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565413 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-config-data\") pod \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\" (UID: \"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9\") " Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565870 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565887 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4dx\" (UniqueName: \"kubernetes.io/projected/2a0386d0-150b-4801-a553-9ed04dda83da-kube-api-access-7c4dx\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565903 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565915 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565926 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0386d0-150b-4801-a553-9ed04dda83da-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.565940 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-logs" (OuterVolumeSpecName: "logs") pod "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" (UID: "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.572999 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-kube-api-access-jqs8n" (OuterVolumeSpecName: "kube-api-access-jqs8n") pod "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" (UID: "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9"). InnerVolumeSpecName "kube-api-access-jqs8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.589428 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" (UID: "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.617601 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-config-data" (OuterVolumeSpecName: "config-data") pod "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" (UID: "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.636423 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" (UID: "f5d85add-ed83-4ebc-a0b4-53839d4bb8d9"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.668086 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.668121 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqs8n\" (UniqueName: \"kubernetes.io/projected/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-kube-api-access-jqs8n\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.668135 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.668147 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.668159 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.707396 4813 generic.go:334] "Generic (PLEG): container finished" podID="8df3eeb2-792c-419e-bdee-4fb9ff863965" containerID="ac32a9cf8c62423ca93032c45df6701187d840b979dc61345472f90c0eb11d0a" exitCode=0 Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.707516 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" event={"ID":"8df3eeb2-792c-419e-bdee-4fb9ff863965","Type":"ContainerDied","Data":"ac32a9cf8c62423ca93032c45df6701187d840b979dc61345472f90c0eb11d0a"} Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.707543 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" event={"ID":"8df3eeb2-792c-419e-bdee-4fb9ff863965","Type":"ContainerStarted","Data":"783a2dc197e388200bd2cfa60f20bdf9c65b453df6c34931d916fd20600d7ddd"} Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.710945 4813 generic.go:334] "Generic (PLEG): container finished" podID="2a0386d0-150b-4801-a553-9ed04dda83da" containerID="69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576" exitCode=0 Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.711008 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"2a0386d0-150b-4801-a553-9ed04dda83da","Type":"ContainerDied","Data":"69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576"} Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.711027 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.711037 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"2a0386d0-150b-4801-a553-9ed04dda83da","Type":"ContainerDied","Data":"d589945abcfdce001b2cc5745951a32ff0a93a4f298a4e0d3c150142796b49d1"} Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.716191 4813 generic.go:334] "Generic (PLEG): container finished" podID="f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" containerID="a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72" exitCode=0 Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.711048 4813 scope.go:117] "RemoveContainer" containerID="69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.716305 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.716202 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9","Type":"ContainerDied","Data":"a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72"} Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.716432 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"f5d85add-ed83-4ebc-a0b4-53839d4bb8d9","Type":"ContainerDied","Data":"60f8ce5f8ebfeee5c9f1818864e44cce62ddbda1c179a27f41ea180a4f657600"} Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.716407 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="0768a899-8040-4a9d-bc42-107e17bf5e79" containerName="watcher-decision-engine" containerID="cri-o://af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de" gracePeriod=30 Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.751246 4813 scope.go:117] "RemoveContainer" containerID="c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.763794 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.774516 4813 scope.go:117] "RemoveContainer" containerID="69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576" Mar 20 16:25:22 crc kubenswrapper[4813]: E0320 16:25:22.774928 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576\": container with ID starting with 69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576 not found: ID does not exist" containerID="69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.774971 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576"} err="failed to get container status \"69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576\": rpc error: code = NotFound desc = could not find container \"69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576\": container with ID starting with 
69d660abd2ebd3f4d76edf6d447da148761089653ff9f579c241b7a59acf8576 not found: ID does not exist" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.774998 4813 scope.go:117] "RemoveContainer" containerID="c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150" Mar 20 16:25:22 crc kubenswrapper[4813]: E0320 16:25:22.775246 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150\": container with ID starting with c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150 not found: ID does not exist" containerID="c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.775277 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150"} err="failed to get container status \"c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150\": rpc error: code = NotFound desc = could not find container \"c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150\": container with ID starting with c2be967b6fa777e4edfed95b44ee0571f9d945bc05727eb9f04b134ff30c9150 not found: ID does not exist" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.775297 4813 scope.go:117] "RemoveContainer" containerID="a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.776332 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.785370 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.796552 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.803349 4813 scope.go:117] "RemoveContainer" containerID="a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72" Mar 20 16:25:22 crc kubenswrapper[4813]: E0320 16:25:22.804749 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72\": container with ID starting with a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72 not found: ID does not exist" containerID="a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72" Mar 20 16:25:22 crc kubenswrapper[4813]: I0320 16:25:22.804783 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72"} err="failed to get container status \"a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72\": rpc error: code = NotFound desc = could not find container \"a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72\": container with ID starting with a4ec5a1d1ee992e4ac2a82ab9594c9974cbd565772692e1aff233f3c42dd7c72 not found: ID does not exist" Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.275537 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" path="/var/lib/kubelet/pods/2a0386d0-150b-4801-a553-9ed04dda83da/volumes" Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 
16:25:23.276301 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" path="/var/lib/kubelet/pods/f5d85add-ed83-4ebc-a0b4-53839d4bb8d9/volumes" Mar 20 16:25:23 crc kubenswrapper[4813]: E0320 16:25:23.287684 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:23 crc kubenswrapper[4813]: E0320 16:25:23.287787 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data podName:0768a899-8040-4a9d-bc42-107e17bf5e79 nodeName:}" failed. No retries permitted until 2026-03-20 16:25:25.287762902 +0000 UTC m=+2854.710465773 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.547450 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.548060 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-central-agent" containerID="cri-o://dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2" gracePeriod=30 Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.548120 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-notification-agent" containerID="cri-o://0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50" gracePeriod=30 Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.548166 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="sg-core" containerID="cri-o://a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2" gracePeriod=30 Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.548112 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="proxy-httpd" containerID="cri-o://7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056" gracePeriod=30 Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.571364 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.727460 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerDied","Data":"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056"} Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.727467 4813 generic.go:334] "Generic (PLEG): container finished" podID="56100e5f-07eb-4a41-93aa-1958dde77551" 
containerID="7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056" exitCode=0 Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.727593 4813 generic.go:334] "Generic (PLEG): container finished" podID="56100e5f-07eb-4a41-93aa-1958dde77551" containerID="a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2" exitCode=2 Mar 20 16:25:23 crc kubenswrapper[4813]: I0320 16:25:23.727755 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerDied","Data":"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2"} Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.103594 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.200872 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df3eeb2-792c-419e-bdee-4fb9ff863965-operator-scripts\") pod \"8df3eeb2-792c-419e-bdee-4fb9ff863965\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.200936 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpjlm\" (UniqueName: \"kubernetes.io/projected/8df3eeb2-792c-419e-bdee-4fb9ff863965-kube-api-access-mpjlm\") pod \"8df3eeb2-792c-419e-bdee-4fb9ff863965\" (UID: \"8df3eeb2-792c-419e-bdee-4fb9ff863965\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.201662 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8df3eeb2-792c-419e-bdee-4fb9ff863965-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8df3eeb2-792c-419e-bdee-4fb9ff863965" (UID: "8df3eeb2-792c-419e-bdee-4fb9ff863965"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.208820 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8df3eeb2-792c-419e-bdee-4fb9ff863965-kube-api-access-mpjlm" (OuterVolumeSpecName: "kube-api-access-mpjlm") pod "8df3eeb2-792c-419e-bdee-4fb9ff863965" (UID: "8df3eeb2-792c-419e-bdee-4fb9ff863965"). InnerVolumeSpecName "kube-api-access-mpjlm". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.302868 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8df3eeb2-792c-419e-bdee-4fb9ff863965-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.302918 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpjlm\" (UniqueName: \"kubernetes.io/projected/8df3eeb2-792c-419e-bdee-4fb9ff863965-kube-api-access-mpjlm\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.610735 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-log-httpd\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713498 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlwsh\" (UniqueName: \"kubernetes.io/projected/56100e5f-07eb-4a41-93aa-1958dde77551-kube-api-access-hlwsh\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713542 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-combined-ca-bundle\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713576 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-run-httpd\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713619 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-sg-core-conf-yaml\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713673 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-scripts\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713702 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-config-data\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713711 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713741 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-ceilometer-tls-certs\") pod \"56100e5f-07eb-4a41-93aa-1958dde77551\" (UID: \"56100e5f-07eb-4a41-93aa-1958dde77551\") " Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.713907 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.714264 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.714289 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/56100e5f-07eb-4a41-93aa-1958dde77551-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.719106 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-scripts" (OuterVolumeSpecName: "scripts") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.720786 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56100e5f-07eb-4a41-93aa-1958dde77551-kube-api-access-hlwsh" (OuterVolumeSpecName: "kube-api-access-hlwsh") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "kube-api-access-hlwsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.738809 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.752215 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.752258 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher5a5a-account-delete-x7dp8" event={"ID":"8df3eeb2-792c-419e-bdee-4fb9ff863965","Type":"ContainerDied","Data":"783a2dc197e388200bd2cfa60f20bdf9c65b453df6c34931d916fd20600d7ddd"} Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.752348 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="783a2dc197e388200bd2cfa60f20bdf9c65b453df6c34931d916fd20600d7ddd" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756239 4813 generic.go:334] "Generic (PLEG): container finished" podID="56100e5f-07eb-4a41-93aa-1958dde77551" containerID="0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50" exitCode=0 Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756264 4813 generic.go:334] "Generic (PLEG): container finished" podID="56100e5f-07eb-4a41-93aa-1958dde77551" containerID="dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2" exitCode=0 Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756280 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerDied","Data":"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50"} Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756330 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756355 4813 scope.go:117] "RemoveContainer" containerID="7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756342 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerDied","Data":"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2"} Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.756470 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"56100e5f-07eb-4a41-93aa-1958dde77551","Type":"ContainerDied","Data":"6c526145af8e56cd03915d3c9bc5a5f5dc0a4e4f822400e7202754b1f035dd19"} Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.765292 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.784966 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.807600 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-config-data" (OuterVolumeSpecName: "config-data") pod "56100e5f-07eb-4a41-93aa-1958dde77551" (UID: "56100e5f-07eb-4a41-93aa-1958dde77551"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.816092 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.816121 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlwsh\" (UniqueName: \"kubernetes.io/projected/56100e5f-07eb-4a41-93aa-1958dde77551-kube-api-access-hlwsh\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.816136 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.816149 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.816160 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.816170 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56100e5f-07eb-4a41-93aa-1958dde77551-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.826906 4813 scope.go:117] "RemoveContainer" containerID="a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.845956 4813 scope.go:117] "RemoveContainer" containerID="0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.862748 4813 scope.go:117] "RemoveContainer" containerID="dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.877643 4813 scope.go:117] "RemoveContainer" containerID="7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056" Mar 20 16:25:24 crc kubenswrapper[4813]: E0320 16:25:24.877977 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056\": container with ID starting with 7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056 not found: ID does not exist" containerID="7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878008 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056"} err="failed to get container status 
\"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056\": rpc error: code = NotFound desc = could not find container \"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056\": container with ID starting with 7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878026 4813 scope.go:117] "RemoveContainer" containerID="a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2" Mar 20 16:25:24 crc kubenswrapper[4813]: E0320 16:25:24.878301 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2\": container with ID starting with a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2 not found: ID does not exist" containerID="a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878324 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2"} err="failed to get container status \"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2\": rpc error: code = NotFound desc = could not find container \"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2\": container with ID starting with a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878342 4813 scope.go:117] "RemoveContainer" containerID="0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50" Mar 20 16:25:24 crc kubenswrapper[4813]: E0320 16:25:24.878632 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50\": container with ID starting with 0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50 not found: ID does not exist" containerID="0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878653 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50"} err="failed to get container status \"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50\": rpc error: code = NotFound desc = could not find container \"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50\": container with ID starting with 0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878666 4813 scope.go:117] "RemoveContainer" containerID="dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2" Mar 20 16:25:24 crc kubenswrapper[4813]: E0320 16:25:24.878868 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2\": container with ID starting with dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2 not found: ID does not exist" containerID="dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878884 4813 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2"} err="failed to get container status \"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2\": rpc error: code = NotFound desc = could not find container \"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2\": container with ID starting with dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.878895 4813 scope.go:117] "RemoveContainer" containerID="7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879130 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056"} err="failed to get container status \"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056\": rpc error: code = NotFound desc = could not find container \"7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056\": container with ID starting with 7a4b857acc5f48b1eed35982c0a0a0c921cff2fac40c41908f8286d5d96d7056 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879151 4813 scope.go:117] "RemoveContainer" containerID="a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879364 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2"} err="failed to get container status \"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2\": rpc error: code = NotFound desc = could not find container \"a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2\": container with ID starting with a470ea7a34f70d7179863647a1fd01d9716b6ba356e282d0945b3bd64e52cbf2 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879383 4813 scope.go:117] "RemoveContainer" containerID="0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879634 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50"} err="failed to get container status \"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50\": rpc error: code = NotFound desc = could not find container \"0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50\": container with ID starting with 0993c6f1cc00cd2da2506864240573bce4f3be6848dabda79d146c173064bf50 not found: ID does not exist" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879652 4813 scope.go:117] "RemoveContainer" containerID="dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2" Mar 20 16:25:24 crc kubenswrapper[4813]: I0320 16:25:24.879873 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2"} err="failed to get container status \"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2\": rpc error: code = NotFound desc = could not find container \"dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2\": container with ID starting with dcee3c30caa6097e4b7a6ab203fa499bc645c2a8c904c7895cd95e82374e8ac2 
not found: ID does not exist" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.090080 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.096821 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114360 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114695 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="proxy-httpd" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114720 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="proxy-httpd" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114732 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" containerName="watcher-applier" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114738 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" containerName="watcher-applier" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114753 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8df3eeb2-792c-419e-bdee-4fb9ff863965" containerName="mariadb-account-delete" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114760 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8df3eeb2-792c-419e-bdee-4fb9ff863965" containerName="mariadb-account-delete" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114778 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-central-agent" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114784 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-central-agent" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114794 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-kuttl-api-log" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114799 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-kuttl-api-log" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114809 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="sg-core" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114814 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="sg-core" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114825 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-api" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114831 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-api" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.114841 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-notification-agent" Mar 20 16:25:25 crc 
kubenswrapper[4813]: I0320 16:25:25.114847 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-notification-agent" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114982 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5d85add-ed83-4ebc-a0b4-53839d4bb8d9" containerName="watcher-applier" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.114997 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-api" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.115007 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="sg-core" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.115018 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8df3eeb2-792c-419e-bdee-4fb9ff863965" containerName="mariadb-account-delete" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.115029 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-central-agent" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.115042 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a0386d0-150b-4801-a553-9ed04dda83da" containerName="watcher-kuttl-api-log" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.115048 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="ceilometer-notification-agent" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.115055 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" containerName="proxy-httpd" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.116519 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.118996 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.119476 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.119806 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.152920 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.229842 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.229918 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nkng\" (UniqueName: \"kubernetes.io/projected/8c913f28-bf56-4cb0-9b77-7a589e30e18b-kube-api-access-9nkng\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.230114 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-log-httpd\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.230218 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-run-httpd\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.230263 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.230299 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-config-data\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.230416 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.230471 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-scripts\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.274525 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56100e5f-07eb-4a41-93aa-1958dde77551" path="/var/lib/kubelet/pods/56100e5f-07eb-4a41-93aa-1958dde77551/volumes" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.331959 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332016 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nkng\" (UniqueName: \"kubernetes.io/projected/8c913f28-bf56-4cb0-9b77-7a589e30e18b-kube-api-access-9nkng\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332096 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-log-httpd\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332129 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-run-httpd\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332152 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332176 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-config-data\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332220 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332237 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-scripts\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.332779 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-run-httpd\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.332852 4813 secret.go:188] Couldn't get secret watcher-kuttl-default/watcher-kuttl-decision-engine-config-data: secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:25 crc kubenswrapper[4813]: E0320 16:25:25.332925 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data podName:0768a899-8040-4a9d-bc42-107e17bf5e79 nodeName:}" failed. No retries permitted until 2026-03-20 16:25:29.332905421 +0000 UTC m=+2858.755608332 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data") pod "watcher-kuttl-decision-engine-0" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79") : secret "watcher-kuttl-decision-engine-config-data" not found Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.333008 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-log-httpd\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.338748 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-config-data\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.339046 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.341244 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-scripts\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.347320 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.347865 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.372265 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nkng\" (UniqueName: 
\"kubernetes.io/projected/8c913f28-bf56-4cb0-9b77-7a589e30e18b-kube-api-access-9nkng\") pod \"ceilometer-0\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.434205 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:25 crc kubenswrapper[4813]: I0320 16:25:25.871541 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:25 crc kubenswrapper[4813]: W0320 16:25:25.871655 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c913f28_bf56_4cb0_9b77_7a589e30e18b.slice/crio-abde57282c134c2f11680ccc3b142a80f95ef5a4e97b12af7450065c5463fb25 WatchSource:0}: Error finding container abde57282c134c2f11680ccc3b142a80f95ef5a4e97b12af7450065c5463fb25: Status 404 returned error can't find the container with id abde57282c134c2f11680ccc3b142a80f95ef5a4e97b12af7450065c5463fb25 Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.162407 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qjrdt"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.177768 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-qjrdt"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.189708 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher5a5a-account-delete-x7dp8"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.198844 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher5a5a-account-delete-x7dp8"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.212072 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.212123 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-5a5a-account-create-update-2kb4k"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.605784 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.658155 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data\") pod \"0768a899-8040-4a9d-bc42-107e17bf5e79\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.658217 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-custom-prometheus-ca\") pod \"0768a899-8040-4a9d-bc42-107e17bf5e79\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.658258 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqkkf\" (UniqueName: \"kubernetes.io/projected/0768a899-8040-4a9d-bc42-107e17bf5e79-kube-api-access-mqkkf\") pod \"0768a899-8040-4a9d-bc42-107e17bf5e79\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.658278 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0768a899-8040-4a9d-bc42-107e17bf5e79-logs\") pod \"0768a899-8040-4a9d-bc42-107e17bf5e79\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.658347 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-cert-memcached-mtls\") pod \"0768a899-8040-4a9d-bc42-107e17bf5e79\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.658384 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-combined-ca-bundle\") pod \"0768a899-8040-4a9d-bc42-107e17bf5e79\" (UID: \"0768a899-8040-4a9d-bc42-107e17bf5e79\") " Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.659392 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0768a899-8040-4a9d-bc42-107e17bf5e79-logs" (OuterVolumeSpecName: "logs") pod "0768a899-8040-4a9d-bc42-107e17bf5e79" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.664597 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0768a899-8040-4a9d-bc42-107e17bf5e79-kube-api-access-mqkkf" (OuterVolumeSpecName: "kube-api-access-mqkkf") pod "0768a899-8040-4a9d-bc42-107e17bf5e79" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79"). InnerVolumeSpecName "kube-api-access-mqkkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.696096 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0768a899-8040-4a9d-bc42-107e17bf5e79" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.700669 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "0768a899-8040-4a9d-bc42-107e17bf5e79" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.704707 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data" (OuterVolumeSpecName: "config-data") pod "0768a899-8040-4a9d-bc42-107e17bf5e79" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.740226 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "0768a899-8040-4a9d-bc42-107e17bf5e79" (UID: "0768a899-8040-4a9d-bc42-107e17bf5e79"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.760422 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.760463 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.760493 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqkkf\" (UniqueName: \"kubernetes.io/projected/0768a899-8040-4a9d-bc42-107e17bf5e79-kube-api-access-mqkkf\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.760507 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0768a899-8040-4a9d-bc42-107e17bf5e79-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.760517 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.760528 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0768a899-8040-4a9d-bc42-107e17bf5e79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.778871 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerStarted","Data":"7313b6dc7eb43c28dce26019cf5e007eb0be5941b02e95a37818be456ed0b281"} Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.778923 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerStarted","Data":"abde57282c134c2f11680ccc3b142a80f95ef5a4e97b12af7450065c5463fb25"} Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.781002 4813 generic.go:334] "Generic (PLEG): container finished" podID="0768a899-8040-4a9d-bc42-107e17bf5e79" containerID="af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de" exitCode=0 Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.781037 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"0768a899-8040-4a9d-bc42-107e17bf5e79","Type":"ContainerDied","Data":"af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de"} Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.781059 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"0768a899-8040-4a9d-bc42-107e17bf5e79","Type":"ContainerDied","Data":"21d11cf7b3e0e54508032daf2538999c73fcdff9abb5c79c2e18b5c33137d3e4"} Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.781082 4813 scope.go:117] "RemoveContainer" containerID="af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.781188 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.818138 4813 scope.go:117] "RemoveContainer" containerID="af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de" Mar 20 16:25:26 crc kubenswrapper[4813]: E0320 16:25:26.818880 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de\": container with ID starting with af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de not found: ID does not exist" containerID="af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.818907 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de"} err="failed to get container status \"af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de\": rpc error: code = NotFound desc = could not find container \"af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de\": container with ID starting with af534636fde735ce8380b22fb7f248dd3c87f855a69f8b0e37c186fae45ad9de not found: ID does not exist" Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.827706 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:26 crc kubenswrapper[4813]: I0320 16:25:26.836143 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:27 crc kubenswrapper[4813]: I0320 16:25:27.284368 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0768a899-8040-4a9d-bc42-107e17bf5e79" path="/var/lib/kubelet/pods/0768a899-8040-4a9d-bc42-107e17bf5e79/volumes" Mar 20 16:25:27 crc kubenswrapper[4813]: I0320 16:25:27.285746 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8df3eeb2-792c-419e-bdee-4fb9ff863965" path="/var/lib/kubelet/pods/8df3eeb2-792c-419e-bdee-4fb9ff863965/volumes" Mar 20 
16:25:27 crc kubenswrapper[4813]: I0320 16:25:27.286833 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed1a0f2e-c2b6-47bd-a85d-07044d9563a2" path="/var/lib/kubelet/pods/ed1a0f2e-c2b6-47bd-a85d-07044d9563a2/volumes" Mar 20 16:25:27 crc kubenswrapper[4813]: I0320 16:25:27.289033 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc732f79-010e-47e2-87ab-f6a1c40c7288" path="/var/lib/kubelet/pods/fc732f79-010e-47e2-87ab-f6a1c40c7288/volumes" Mar 20 16:25:27 crc kubenswrapper[4813]: I0320 16:25:27.792309 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerStarted","Data":"2071818e4b7d605a4faa95451c241da4ed7fdfb43008edaf7fe65b52da30dbf2"} Mar 20 16:25:27 crc kubenswrapper[4813]: I0320 16:25:27.792583 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerStarted","Data":"e276d9f7f91e9855d81eb87283bb89deb8932e7c2090c0a78a7779573bb4139c"} Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.252696 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-d736-account-create-update-vq2rf"] Mar 20 16:25:28 crc kubenswrapper[4813]: E0320 16:25:28.253335 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0768a899-8040-4a9d-bc42-107e17bf5e79" containerName="watcher-decision-engine" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.253353 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0768a899-8040-4a9d-bc42-107e17bf5e79" containerName="watcher-decision-engine" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.253501 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0768a899-8040-4a9d-bc42-107e17bf5e79" containerName="watcher-decision-engine" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.254012 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.257189 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-db-secret" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.262032 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-db-create-c5g9t"] Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.263418 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.268424 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-d736-account-create-update-vq2rf"] Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.284493 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-c5g9t"] Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.287563 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479b8658-bec4-4db2-9ac0-992e8bf35f0d-operator-scripts\") pod \"watcher-d736-account-create-update-vq2rf\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.287638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7rfv\" (UniqueName: \"kubernetes.io/projected/479b8658-bec4-4db2-9ac0-992e8bf35f0d-kube-api-access-c7rfv\") pod \"watcher-d736-account-create-update-vq2rf\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.389349 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479b8658-bec4-4db2-9ac0-992e8bf35f0d-operator-scripts\") pod \"watcher-d736-account-create-update-vq2rf\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.389555 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7rfv\" (UniqueName: \"kubernetes.io/projected/479b8658-bec4-4db2-9ac0-992e8bf35f0d-kube-api-access-c7rfv\") pod \"watcher-d736-account-create-update-vq2rf\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.389621 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-operator-scripts\") pod \"watcher-db-create-c5g9t\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.389649 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdqqw\" (UniqueName: \"kubernetes.io/projected/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-kube-api-access-rdqqw\") pod \"watcher-db-create-c5g9t\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.390064 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479b8658-bec4-4db2-9ac0-992e8bf35f0d-operator-scripts\") pod \"watcher-d736-account-create-update-vq2rf\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.407333 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7rfv\" (UniqueName: \"kubernetes.io/projected/479b8658-bec4-4db2-9ac0-992e8bf35f0d-kube-api-access-c7rfv\") pod \"watcher-d736-account-create-update-vq2rf\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.491181 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-operator-scripts\") pod \"watcher-db-create-c5g9t\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.491231 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdqqw\" (UniqueName: \"kubernetes.io/projected/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-kube-api-access-rdqqw\") pod \"watcher-db-create-c5g9t\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.492048 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-operator-scripts\") pod \"watcher-db-create-c5g9t\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.509777 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdqqw\" (UniqueName: \"kubernetes.io/projected/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-kube-api-access-rdqqw\") pod \"watcher-db-create-c5g9t\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.610503 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:28 crc kubenswrapper[4813]: I0320 16:25:28.619821 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.150697 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-d736-account-create-update-vq2rf"] Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.305540 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-db-create-c5g9t"] Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.820376 4813 generic.go:334] "Generic (PLEG): container finished" podID="479b8658-bec4-4db2-9ac0-992e8bf35f0d" containerID="60ad7fe9d32aae62c8bd0e22facefe1fb91f0420aec5bef4f48af187bb0e4d76" exitCode=0 Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.820610 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" event={"ID":"479b8658-bec4-4db2-9ac0-992e8bf35f0d","Type":"ContainerDied","Data":"60ad7fe9d32aae62c8bd0e22facefe1fb91f0420aec5bef4f48af187bb0e4d76"} Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.821049 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" event={"ID":"479b8658-bec4-4db2-9ac0-992e8bf35f0d","Type":"ContainerStarted","Data":"953271ba56c922406cb78c8ee2f651221f0f2986572357c803c3cf1272982b44"} Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.822744 4813 generic.go:334] "Generic (PLEG): container finished" podID="e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" containerID="ca6e68e6dbfbcda8642182fc54f06affe2753a944d7af62beb9276127bffc0b9" exitCode=0 Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.822779 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-c5g9t" event={"ID":"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf","Type":"ContainerDied","Data":"ca6e68e6dbfbcda8642182fc54f06affe2753a944d7af62beb9276127bffc0b9"} Mar 20 16:25:29 crc kubenswrapper[4813]: I0320 16:25:29.822806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-c5g9t" event={"ID":"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf","Type":"ContainerStarted","Data":"79156ffa21f0a70ddd157c49174e94498df859d8de73ea007713972a4e573a94"} Mar 20 16:25:30 crc kubenswrapper[4813]: I0320 16:25:30.834165 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerStarted","Data":"28b2b2b691a05ab2d7dd38f76b3c798cadbb33b2c1ab7c3ca44e83672c7a52c3"} Mar 20 16:25:30 crc kubenswrapper[4813]: I0320 16:25:30.834305 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:30 crc kubenswrapper[4813]: I0320 16:25:30.863326 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.1466307589999998 podStartE2EDuration="5.863306s" podCreationTimestamp="2026-03-20 16:25:25 +0000 UTC" firstStartedPulling="2026-03-20 16:25:25.873554046 +0000 UTC m=+2855.296256887" lastFinishedPulling="2026-03-20 16:25:29.590229287 +0000 UTC m=+2859.012932128" observedRunningTime="2026-03-20 16:25:30.855410557 +0000 UTC m=+2860.278113398" watchObservedRunningTime="2026-03-20 16:25:30.863306 +0000 UTC m=+2860.286008831" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.230108 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.318640 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.373137 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-operator-scripts\") pod \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.373236 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdqqw\" (UniqueName: \"kubernetes.io/projected/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-kube-api-access-rdqqw\") pod \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\" (UID: \"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf\") " Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.373300 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479b8658-bec4-4db2-9ac0-992e8bf35f0d-operator-scripts\") pod \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.373346 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7rfv\" (UniqueName: \"kubernetes.io/projected/479b8658-bec4-4db2-9ac0-992e8bf35f0d-kube-api-access-c7rfv\") pod \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\" (UID: \"479b8658-bec4-4db2-9ac0-992e8bf35f0d\") " Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.373862 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" (UID: "e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.374518 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/479b8658-bec4-4db2-9ac0-992e8bf35f0d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "479b8658-bec4-4db2-9ac0-992e8bf35f0d" (UID: "479b8658-bec4-4db2-9ac0-992e8bf35f0d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.377336 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-kube-api-access-rdqqw" (OuterVolumeSpecName: "kube-api-access-rdqqw") pod "e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" (UID: "e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf"). InnerVolumeSpecName "kube-api-access-rdqqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.379407 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/479b8658-bec4-4db2-9ac0-992e8bf35f0d-kube-api-access-c7rfv" (OuterVolumeSpecName: "kube-api-access-c7rfv") pod "479b8658-bec4-4db2-9ac0-992e8bf35f0d" (UID: "479b8658-bec4-4db2-9ac0-992e8bf35f0d"). InnerVolumeSpecName "kube-api-access-c7rfv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.475602 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdqqw\" (UniqueName: \"kubernetes.io/projected/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-kube-api-access-rdqqw\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.475629 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/479b8658-bec4-4db2-9ac0-992e8bf35f0d-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.475638 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7rfv\" (UniqueName: \"kubernetes.io/projected/479b8658-bec4-4db2-9ac0-992e8bf35f0d-kube-api-access-c7rfv\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.475647 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.840438 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-db-create-c5g9t" event={"ID":"e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf","Type":"ContainerDied","Data":"79156ffa21f0a70ddd157c49174e94498df859d8de73ea007713972a4e573a94"} Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.840493 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79156ffa21f0a70ddd157c49174e94498df859d8de73ea007713972a4e573a94" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.840559 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-db-create-c5g9t" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.844284 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.852652 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-d736-account-create-update-vq2rf" event={"ID":"479b8658-bec4-4db2-9ac0-992e8bf35f0d","Type":"ContainerDied","Data":"953271ba56c922406cb78c8ee2f651221f0f2986572357c803c3cf1272982b44"} Mar 20 16:25:31 crc kubenswrapper[4813]: I0320 16:25:31.852704 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="953271ba56c922406cb78c8ee2f651221f0f2986572357c803c3cf1272982b44" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.500512 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4"] Mar 20 16:25:33 crc kubenswrapper[4813]: E0320 16:25:33.501046 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="479b8658-bec4-4db2-9ac0-992e8bf35f0d" containerName="mariadb-account-create-update" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.501057 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="479b8658-bec4-4db2-9ac0-992e8bf35f0d" containerName="mariadb-account-create-update" Mar 20 16:25:33 crc kubenswrapper[4813]: E0320 16:25:33.501071 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" containerName="mariadb-database-create" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.501077 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" containerName="mariadb-database-create" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.501216 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" containerName="mariadb-database-create" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.501232 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="479b8658-bec4-4db2-9ac0-992e8bf35f0d" containerName="mariadb-account-create-update" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.501766 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.504020 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-sqrp6" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.504392 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-config-data" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.512056 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4"] Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.608468 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-db-sync-config-data\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.608657 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-config-data\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.608895 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcpvt\" (UniqueName: \"kubernetes.io/projected/5379087a-bcdc-4d71-a12e-6152c1987c51-kube-api-access-xcpvt\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.608973 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.709996 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-db-sync-config-data\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.710055 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-config-data\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.710136 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcpvt\" (UniqueName: \"kubernetes.io/projected/5379087a-bcdc-4d71-a12e-6152c1987c51-kube-api-access-xcpvt\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc 
kubenswrapper[4813]: I0320 16:25:33.710168 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.715066 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-db-sync-config-data\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.715687 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-combined-ca-bundle\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.722127 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-config-data\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.729188 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcpvt\" (UniqueName: \"kubernetes.io/projected/5379087a-bcdc-4d71-a12e-6152c1987c51-kube-api-access-xcpvt\") pod \"watcher-kuttl-db-sync-jfkc4\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:33 crc kubenswrapper[4813]: I0320 16:25:33.820862 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:34 crc kubenswrapper[4813]: I0320 16:25:34.336233 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4"] Mar 20 16:25:34 crc kubenswrapper[4813]: I0320 16:25:34.874851 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" event={"ID":"5379087a-bcdc-4d71-a12e-6152c1987c51","Type":"ContainerStarted","Data":"0084f4780ab0fbfeb9661a7b5d49a726f5dee1a99d5205505567f372733eb033"} Mar 20 16:25:34 crc kubenswrapper[4813]: I0320 16:25:34.875343 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" event={"ID":"5379087a-bcdc-4d71-a12e-6152c1987c51","Type":"ContainerStarted","Data":"0238ccbaeca9c29a7b5d44f64c8c1678834c7b7056412bf2b9c702d05db818f9"} Mar 20 16:25:34 crc kubenswrapper[4813]: I0320 16:25:34.897393 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" podStartSLOduration=1.897368352 podStartE2EDuration="1.897368352s" podCreationTimestamp="2026-03-20 16:25:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:25:34.890721963 +0000 UTC m=+2864.313424804" watchObservedRunningTime="2026-03-20 16:25:34.897368352 +0000 UTC m=+2864.320071193" Mar 20 16:25:37 crc kubenswrapper[4813]: I0320 16:25:37.900969 4813 generic.go:334] "Generic (PLEG): container finished" podID="5379087a-bcdc-4d71-a12e-6152c1987c51" containerID="0084f4780ab0fbfeb9661a7b5d49a726f5dee1a99d5205505567f372733eb033" exitCode=0 Mar 20 16:25:37 crc kubenswrapper[4813]: I0320 16:25:37.901053 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" event={"ID":"5379087a-bcdc-4d71-a12e-6152c1987c51","Type":"ContainerDied","Data":"0084f4780ab0fbfeb9661a7b5d49a726f5dee1a99d5205505567f372733eb033"} Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.294422 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.405365 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-db-sync-config-data\") pod \"5379087a-bcdc-4d71-a12e-6152c1987c51\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.405670 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-config-data\") pod \"5379087a-bcdc-4d71-a12e-6152c1987c51\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.405729 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcpvt\" (UniqueName: \"kubernetes.io/projected/5379087a-bcdc-4d71-a12e-6152c1987c51-kube-api-access-xcpvt\") pod \"5379087a-bcdc-4d71-a12e-6152c1987c51\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.405826 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-combined-ca-bundle\") pod \"5379087a-bcdc-4d71-a12e-6152c1987c51\" (UID: \"5379087a-bcdc-4d71-a12e-6152c1987c51\") " Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.411389 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5379087a-bcdc-4d71-a12e-6152c1987c51" (UID: "5379087a-bcdc-4d71-a12e-6152c1987c51"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.426770 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5379087a-bcdc-4d71-a12e-6152c1987c51-kube-api-access-xcpvt" (OuterVolumeSpecName: "kube-api-access-xcpvt") pod "5379087a-bcdc-4d71-a12e-6152c1987c51" (UID: "5379087a-bcdc-4d71-a12e-6152c1987c51"). InnerVolumeSpecName "kube-api-access-xcpvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.431756 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5379087a-bcdc-4d71-a12e-6152c1987c51" (UID: "5379087a-bcdc-4d71-a12e-6152c1987c51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.449401 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-config-data" (OuterVolumeSpecName: "config-data") pod "5379087a-bcdc-4d71-a12e-6152c1987c51" (UID: "5379087a-bcdc-4d71-a12e-6152c1987c51"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.507778 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.507834 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.507852 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5379087a-bcdc-4d71-a12e-6152c1987c51-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.507871 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcpvt\" (UniqueName: \"kubernetes.io/projected/5379087a-bcdc-4d71-a12e-6152c1987c51-kube-api-access-xcpvt\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.919952 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" event={"ID":"5379087a-bcdc-4d71-a12e-6152c1987c51","Type":"ContainerDied","Data":"0238ccbaeca9c29a7b5d44f64c8c1678834c7b7056412bf2b9c702d05db818f9"} Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.919997 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0238ccbaeca9c29a7b5d44f64c8c1678834c7b7056412bf2b9c702d05db818f9" Mar 20 16:25:39 crc kubenswrapper[4813]: I0320 16:25:39.920010 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.216853 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:25:40 crc kubenswrapper[4813]: E0320 16:25:40.217171 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5379087a-bcdc-4d71-a12e-6152c1987c51" containerName="watcher-kuttl-db-sync" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.217183 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5379087a-bcdc-4d71-a12e-6152c1987c51" containerName="watcher-kuttl-db-sync" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.217339 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5379087a-bcdc-4d71-a12e-6152c1987c51" containerName="watcher-kuttl-db-sync" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.218125 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.220925 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-api-config-data" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.222160 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-watcher-kuttl-dockercfg-sqrp6" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.229722 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.238116 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.239791 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.257241 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.278990 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.281283 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.286419 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-decision-engine-config-data" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.295986 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.315027 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.316167 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317476 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvktp\" (UniqueName: \"kubernetes.io/projected/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-kube-api-access-hvktp\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317564 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317593 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317628 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdmg8\" (UniqueName: \"kubernetes.io/projected/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-kube-api-access-fdmg8\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317647 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-logs\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317679 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317725 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-config-data\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317869 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-combined-ca-bundle\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317937 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-logs\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.317957 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-custom-prometheus-ca\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.318026 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-cert-memcached-mtls\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.318062 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.318762 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"watcher-kuttl-applier-config-data" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.350477 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.419982 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420029 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420049 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420079 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420095 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e44045b-211f-4bf2-b884-5248a0dc8fca-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420117 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420144 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s8tf\" (UniqueName: \"kubernetes.io/projected/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-kube-api-access-6s8tf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdmg8\" (UniqueName: \"kubernetes.io/projected/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-kube-api-access-fdmg8\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420175 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-logs\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420260 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420329 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-config-data\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420368 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420406 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-combined-ca-bundle\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420428 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420466 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420504 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-custom-prometheus-ca\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420519 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-logs\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420523 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-logs\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420575 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420594 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-cert-memcached-mtls\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420615 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420664 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420681 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvktp\" (UniqueName: \"kubernetes.io/projected/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-kube-api-access-hvktp\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420698 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m425k\" (UniqueName: \"kubernetes.io/projected/7e44045b-211f-4bf2-b884-5248a0dc8fca-kube-api-access-m425k\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.420899 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-logs\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.423927 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-cert-memcached-mtls\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.424268 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-custom-prometheus-ca\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.426188 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-combined-ca-bundle\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.426511 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-combined-ca-bundle\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.427326 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-config-data\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.428941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-custom-prometheus-ca\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.433098 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-config-data\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.435012 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-cert-memcached-mtls\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.437692 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvktp\" (UniqueName: \"kubernetes.io/projected/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-kube-api-access-hvktp\") pod \"watcher-kuttl-api-0\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.439092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdmg8\" (UniqueName: \"kubernetes.io/projected/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-kube-api-access-fdmg8\") pod \"watcher-kuttl-api-1\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522105 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s8tf\" (UniqueName: \"kubernetes.io/projected/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-kube-api-access-6s8tf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522585 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522639 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522689 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522745 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522855 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522887 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m425k\" (UniqueName: \"kubernetes.io/projected/7e44045b-211f-4bf2-b884-5248a0dc8fca-kube-api-access-m425k\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522914 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522943 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.522967 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.523003 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e44045b-211f-4bf2-b884-5248a0dc8fca-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.523623 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e44045b-211f-4bf2-b884-5248a0dc8fca-logs\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.523951 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-logs\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.527923 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-combined-ca-bundle\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.527969 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: 
\"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-cert-memcached-mtls\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.528236 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-combined-ca-bundle\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.528339 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-config-data\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.529751 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-custom-prometheus-ca\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.532153 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-config-data\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.538945 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-cert-memcached-mtls\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.538957 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.542120 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m425k\" (UniqueName: \"kubernetes.io/projected/7e44045b-211f-4bf2-b884-5248a0dc8fca-kube-api-access-m425k\") pod \"watcher-kuttl-applier-0\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.557221 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s8tf\" (UniqueName: \"kubernetes.io/projected/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-kube-api-access-6s8tf\") pod \"watcher-kuttl-decision-engine-0\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.564381 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.605901 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:40 crc kubenswrapper[4813]: I0320 16:25:40.641317 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.035770 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.047368 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.173655 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.279394 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.938432 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e","Type":"ContainerStarted","Data":"ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.938974 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e","Type":"ContainerStarted","Data":"e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.938985 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e","Type":"ContainerStarted","Data":"9b17a7d1c7ce02d08d60f9b998ab04ad615120b1a2c16a99d594c5b6636eb442"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.940227 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.941806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"7b7d20da-8572-4a49-ae8c-f36e8c123ff1","Type":"ContainerStarted","Data":"af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.941832 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"7b7d20da-8572-4a49-ae8c-f36e8c123ff1","Type":"ContainerStarted","Data":"aa9dd1b011e664b47cedeabc9d3dd5f1e62778c140a276acb4b699db4d867352"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.942980 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"7e44045b-211f-4bf2-b884-5248a0dc8fca","Type":"ContainerStarted","Data":"1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.943022 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"7e44045b-211f-4bf2-b884-5248a0dc8fca","Type":"ContainerStarted","Data":"d3eb9b58be44d8db23731b5afc1b13aa58914577714a78f0aea22233ae112c86"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.946096 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d","Type":"ContainerStarted","Data":"6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.946132 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d","Type":"ContainerStarted","Data":"cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.946140 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d","Type":"ContainerStarted","Data":"77cb4e8c94f0c09dca2f4e83f917cca1cf753a535c16931ceab6deefeb7108bb"} Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.946710 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.948250 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.1.7:9322/\": dial tcp 10.217.1.7:9322: connect: connection refused" Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.969713 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-1" podStartSLOduration=1.9696952639999998 podStartE2EDuration="1.969695264s" podCreationTimestamp="2026-03-20 16:25:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:25:41.96510109 +0000 UTC m=+2871.387803931" watchObservedRunningTime="2026-03-20 16:25:41.969695264 +0000 UTC m=+2871.392398105" Mar 20 16:25:41 crc kubenswrapper[4813]: I0320 16:25:41.985277 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podStartSLOduration=1.985258464 podStartE2EDuration="1.985258464s" podCreationTimestamp="2026-03-20 16:25:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:25:41.98067881 +0000 UTC m=+2871.403381651" watchObservedRunningTime="2026-03-20 16:25:41.985258464 +0000 UTC m=+2871.407961295" Mar 20 16:25:42 crc kubenswrapper[4813]: I0320 16:25:42.009371 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-0" podStartSLOduration=2.009350954 podStartE2EDuration="2.009350954s" podCreationTimestamp="2026-03-20 16:25:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:25:42.006036185 +0000 UTC m=+2871.428739026" watchObservedRunningTime="2026-03-20 16:25:42.009350954 +0000 UTC m=+2871.432053795" Mar 20 16:25:42 crc kubenswrapper[4813]: I0320 16:25:42.030980 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podStartSLOduration=2.030963657 podStartE2EDuration="2.030963657s" podCreationTimestamp="2026-03-20 16:25:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 16:25:42.027531574 +0000 UTC m=+2871.450234415" watchObservedRunningTime="2026-03-20 16:25:42.030963657 +0000 UTC m=+2871.453666498" Mar 20 16:25:43 crc kubenswrapper[4813]: I0320 16:25:43.962454 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 16:25:44 crc kubenswrapper[4813]: I0320 16:25:44.388365 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:45 crc kubenswrapper[4813]: I0320 16:25:45.413878 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:45 crc kubenswrapper[4813]: I0320 16:25:45.540118 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:45 crc kubenswrapper[4813]: I0320 16:25:45.565252 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:45 crc kubenswrapper[4813]: I0320 16:25:45.642071 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.540251 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.547760 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.565235 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.570636 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.606819 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.631980 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.642206 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:50 crc kubenswrapper[4813]: I0320 16:25:50.665452 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:51 crc kubenswrapper[4813]: I0320 16:25:51.018663 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:51 crc kubenswrapper[4813]: I0320 16:25:51.026991 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:25:51 crc kubenswrapper[4813]: I0320 16:25:51.029552 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:25:51 crc kubenswrapper[4813]: I0320 16:25:51.045778 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:25:51 crc kubenswrapper[4813]: I0320 16:25:51.055315 
4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:25:53 crc kubenswrapper[4813]: I0320 16:25:53.382061 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:53 crc kubenswrapper[4813]: I0320 16:25:53.382945 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-central-agent" containerID="cri-o://7313b6dc7eb43c28dce26019cf5e007eb0be5941b02e95a37818be456ed0b281" gracePeriod=30 Mar 20 16:25:53 crc kubenswrapper[4813]: I0320 16:25:53.382997 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="sg-core" containerID="cri-o://2071818e4b7d605a4faa95451c241da4ed7fdfb43008edaf7fe65b52da30dbf2" gracePeriod=30 Mar 20 16:25:53 crc kubenswrapper[4813]: I0320 16:25:53.383050 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-notification-agent" containerID="cri-o://e276d9f7f91e9855d81eb87283bb89deb8932e7c2090c0a78a7779573bb4139c" gracePeriod=30 Mar 20 16:25:53 crc kubenswrapper[4813]: I0320 16:25:53.382997 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="proxy-httpd" containerID="cri-o://28b2b2b691a05ab2d7dd38f76b3c798cadbb33b2c1ab7c3ca44e83672c7a52c3" gracePeriod=30 Mar 20 16:25:53 crc kubenswrapper[4813]: I0320 16:25:53.401253 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="watcher-kuttl-default/ceilometer-0" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.043943 4813 generic.go:334] "Generic (PLEG): container finished" podID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerID="28b2b2b691a05ab2d7dd38f76b3c798cadbb33b2c1ab7c3ca44e83672c7a52c3" exitCode=0 Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044643 4813 generic.go:334] "Generic (PLEG): container finished" podID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerID="2071818e4b7d605a4faa95451c241da4ed7fdfb43008edaf7fe65b52da30dbf2" exitCode=2 Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044656 4813 generic.go:334] "Generic (PLEG): container finished" podID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerID="e276d9f7f91e9855d81eb87283bb89deb8932e7c2090c0a78a7779573bb4139c" exitCode=0 Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044664 4813 generic.go:334] "Generic (PLEG): container finished" podID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerID="7313b6dc7eb43c28dce26019cf5e007eb0be5941b02e95a37818be456ed0b281" exitCode=0 Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044702 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerDied","Data":"28b2b2b691a05ab2d7dd38f76b3c798cadbb33b2c1ab7c3ca44e83672c7a52c3"} Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044747 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerDied","Data":"2071818e4b7d605a4faa95451c241da4ed7fdfb43008edaf7fe65b52da30dbf2"} Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044766 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerDied","Data":"e276d9f7f91e9855d81eb87283bb89deb8932e7c2090c0a78a7779573bb4139c"} Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.044779 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerDied","Data":"7313b6dc7eb43c28dce26019cf5e007eb0be5941b02e95a37818be456ed0b281"} Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.210027 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.268996 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-sg-core-conf-yaml\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269061 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-ceilometer-tls-certs\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269101 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-log-httpd\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269166 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nkng\" (UniqueName: \"kubernetes.io/projected/8c913f28-bf56-4cb0-9b77-7a589e30e18b-kube-api-access-9nkng\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269183 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-scripts\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269242 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-combined-ca-bundle\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269288 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-config-data\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269307 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-run-httpd\") pod \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\" (UID: \"8c913f28-bf56-4cb0-9b77-7a589e30e18b\") " Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.269950 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.271964 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.275727 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c913f28-bf56-4cb0-9b77-7a589e30e18b-kube-api-access-9nkng" (OuterVolumeSpecName: "kube-api-access-9nkng") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "kube-api-access-9nkng". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.300618 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-scripts" (OuterVolumeSpecName: "scripts") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.314234 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.344239 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.362773 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371641 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371674 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371683 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371693 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371701 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8c913f28-bf56-4cb0-9b77-7a589e30e18b-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371710 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nkng\" (UniqueName: \"kubernetes.io/projected/8c913f28-bf56-4cb0-9b77-7a589e30e18b-kube-api-access-9nkng\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.371720 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.410757 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-config-data" (OuterVolumeSpecName: "config-data") pod "8c913f28-bf56-4cb0-9b77-7a589e30e18b" (UID: "8c913f28-bf56-4cb0-9b77-7a589e30e18b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:25:54 crc kubenswrapper[4813]: I0320 16:25:54.473359 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c913f28-bf56-4cb0-9b77-7a589e30e18b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.053707 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"8c913f28-bf56-4cb0-9b77-7a589e30e18b","Type":"ContainerDied","Data":"abde57282c134c2f11680ccc3b142a80f95ef5a4e97b12af7450065c5463fb25"} Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.053765 4813 scope.go:117] "RemoveContainer" containerID="28b2b2b691a05ab2d7dd38f76b3c798cadbb33b2c1ab7c3ca44e83672c7a52c3" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.053774 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.086042 4813 scope.go:117] "RemoveContainer" containerID="2071818e4b7d605a4faa95451c241da4ed7fdfb43008edaf7fe65b52da30dbf2" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.090868 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.098211 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.108971 4813 scope.go:117] "RemoveContainer" containerID="e276d9f7f91e9855d81eb87283bb89deb8932e7c2090c0a78a7779573bb4139c" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.115433 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:55 crc kubenswrapper[4813]: E0320 16:25:55.116461 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="sg-core" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116505 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="sg-core" Mar 20 16:25:55 crc kubenswrapper[4813]: E0320 16:25:55.116527 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-central-agent" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116537 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-central-agent" Mar 20 16:25:55 crc kubenswrapper[4813]: E0320 16:25:55.116570 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-notification-agent" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116578 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-notification-agent" Mar 20 16:25:55 crc kubenswrapper[4813]: E0320 16:25:55.116626 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="proxy-httpd" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116636 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="proxy-httpd" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116857 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-notification-agent" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116906 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="proxy-httpd" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116924 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="ceilometer-central-agent" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.116938 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" containerName="sg-core" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.118919 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.124923 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.125109 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.125118 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.134304 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.142693 4813 scope.go:117] "RemoveContainer" containerID="7313b6dc7eb43c28dce26019cf5e007eb0be5941b02e95a37818be456ed0b281" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.187897 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-log-httpd\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.187941 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.188164 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-config-data\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.188277 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-scripts\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.188321 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.188357 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.188377 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-run-httpd\") pod \"ceilometer-0\" (UID: 
\"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.188415 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt6fr\" (UniqueName: \"kubernetes.io/projected/4306ca75-59b5-4d06-b978-b24e4d07cb47-kube-api-access-gt6fr\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.285597 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c913f28-bf56-4cb0-9b77-7a589e30e18b" path="/var/lib/kubelet/pods/8c913f28-bf56-4cb0-9b77-7a589e30e18b/volumes" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290069 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-config-data\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290134 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-scripts\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290169 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290183 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290200 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-run-httpd\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290235 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt6fr\" (UniqueName: \"kubernetes.io/projected/4306ca75-59b5-4d06-b978-b24e4d07cb47-kube-api-access-gt6fr\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290272 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-log-httpd\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290291 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290838 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-log-httpd\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.290910 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-run-httpd\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.295257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-scripts\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.296556 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.297075 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.297357 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-config-data\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.303718 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.323244 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt6fr\" (UniqueName: \"kubernetes.io/projected/4306ca75-59b5-4d06-b978-b24e4d07cb47-kube-api-access-gt6fr\") pod \"ceilometer-0\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.436654 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:25:55 crc kubenswrapper[4813]: I0320 16:25:55.880619 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:25:55 crc kubenswrapper[4813]: W0320 16:25:55.887802 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4306ca75_59b5_4d06_b978_b24e4d07cb47.slice/crio-79607e46a5a9e761098868f84dfc21e47fffa19c80954276a448ca4827eb4e49 WatchSource:0}: Error finding container 79607e46a5a9e761098868f84dfc21e47fffa19c80954276a448ca4827eb4e49: Status 404 returned error can't find the container with id 79607e46a5a9e761098868f84dfc21e47fffa19c80954276a448ca4827eb4e49 Mar 20 16:25:56 crc kubenswrapper[4813]: I0320 16:25:56.065792 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerStarted","Data":"79607e46a5a9e761098868f84dfc21e47fffa19c80954276a448ca4827eb4e49"} Mar 20 16:25:57 crc kubenswrapper[4813]: I0320 16:25:57.078664 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerStarted","Data":"2fd34ec974044165a9ae243c74185fe8bd35f934f63628267414b3686497abda"} Mar 20 16:25:58 crc kubenswrapper[4813]: I0320 16:25:58.088677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerStarted","Data":"c7e1711bb480f87f00f87d0a924346efbfa04fd76ba9cf11958ce253e4e9eb0b"} Mar 20 16:25:58 crc kubenswrapper[4813]: I0320 16:25:58.088989 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerStarted","Data":"be5b1cb7c2d168ba67b41f15b5efabfee5c762d9dc988bc81e38ab832780de7b"} Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.105778 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerStarted","Data":"55df31da9d653748d3ca82005e099b1091998435d08e4129fbaefb1331e6d884"} Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.106420 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.125582 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.229847494 podStartE2EDuration="5.125552514s" podCreationTimestamp="2026-03-20 16:25:55 +0000 UTC" firstStartedPulling="2026-03-20 16:25:55.8893952 +0000 UTC m=+2885.312098051" lastFinishedPulling="2026-03-20 16:25:59.78510023 +0000 UTC m=+2889.207803071" observedRunningTime="2026-03-20 16:26:00.124111035 +0000 UTC m=+2889.546813876" watchObservedRunningTime="2026-03-20 16:26:00.125552514 +0000 UTC m=+2889.548255375" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.142109 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567066-c9q56"] Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.143264 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.147687 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.148247 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.149315 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.158309 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567066-c9q56"] Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.290111 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4z8s\" (UniqueName: \"kubernetes.io/projected/85e8755f-951d-4604-8d18-b2c7c6d17d27-kube-api-access-x4z8s\") pod \"auto-csr-approver-29567066-c9q56\" (UID: \"85e8755f-951d-4604-8d18-b2c7c6d17d27\") " pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.391204 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4z8s\" (UniqueName: \"kubernetes.io/projected/85e8755f-951d-4604-8d18-b2c7c6d17d27-kube-api-access-x4z8s\") pod \"auto-csr-approver-29567066-c9q56\" (UID: \"85e8755f-951d-4604-8d18-b2c7c6d17d27\") " pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.409772 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4z8s\" (UniqueName: \"kubernetes.io/projected/85e8755f-951d-4604-8d18-b2c7c6d17d27-kube-api-access-x4z8s\") pod \"auto-csr-approver-29567066-c9q56\" (UID: \"85e8755f-951d-4604-8d18-b2c7c6d17d27\") " pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.460024 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:00 crc kubenswrapper[4813]: I0320 16:26:00.976583 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567066-c9q56"] Mar 20 16:26:00 crc kubenswrapper[4813]: W0320 16:26:00.984641 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85e8755f_951d_4604_8d18_b2c7c6d17d27.slice/crio-59f8fd62e0545f7dc31e8d4304bed1b781fa99a75e7e797532783bc96c30ff4b WatchSource:0}: Error finding container 59f8fd62e0545f7dc31e8d4304bed1b781fa99a75e7e797532783bc96c30ff4b: Status 404 returned error can't find the container with id 59f8fd62e0545f7dc31e8d4304bed1b781fa99a75e7e797532783bc96c30ff4b Mar 20 16:26:01 crc kubenswrapper[4813]: I0320 16:26:01.114005 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567066-c9q56" event={"ID":"85e8755f-951d-4604-8d18-b2c7c6d17d27","Type":"ContainerStarted","Data":"59f8fd62e0545f7dc31e8d4304bed1b781fa99a75e7e797532783bc96c30ff4b"} Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.230034 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-2"] Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.231902 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.254395 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-2"] Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.321490 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-custom-prometheus-ca\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.321576 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-logs\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.321636 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-config-data\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.321660 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5zp9\" (UniqueName: \"kubernetes.io/projected/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-kube-api-access-w5zp9\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.321675 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-cert-memcached-mtls\") pod \"watcher-kuttl-api-2\" 
(UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.321705 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-combined-ca-bundle\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.422891 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-custom-prometheus-ca\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.423189 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-logs\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.423234 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-config-data\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.423261 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5zp9\" (UniqueName: \"kubernetes.io/projected/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-kube-api-access-w5zp9\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.423276 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-cert-memcached-mtls\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.423308 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-combined-ca-bundle\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.424617 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-logs\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.429312 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-combined-ca-bundle\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 
16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.429697 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-cert-memcached-mtls\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.430647 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-custom-prometheus-ca\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.433363 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-config-data\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.440441 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5zp9\" (UniqueName: \"kubernetes.io/projected/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-kube-api-access-w5zp9\") pod \"watcher-kuttl-api-2\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:02 crc kubenswrapper[4813]: I0320 16:26:02.585192 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:03 crc kubenswrapper[4813]: I0320 16:26:03.042297 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-2"] Mar 20 16:26:03 crc kubenswrapper[4813]: W0320 16:26:03.055955 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1023eeb8_1c11_4e31_bb09_30b6f83ad81e.slice/crio-44aca688d91881abc2648a74c67d104973ae944048082929e1a2f1fe59ccdc69 WatchSource:0}: Error finding container 44aca688d91881abc2648a74c67d104973ae944048082929e1a2f1fe59ccdc69: Status 404 returned error can't find the container with id 44aca688d91881abc2648a74c67d104973ae944048082929e1a2f1fe59ccdc69 Mar 20 16:26:03 crc kubenswrapper[4813]: I0320 16:26:03.142062 4813 generic.go:334] "Generic (PLEG): container finished" podID="85e8755f-951d-4604-8d18-b2c7c6d17d27" containerID="c5cae7a847026d6443dc26b913ce20a2843559592d4de93755c16eca648542fd" exitCode=0 Mar 20 16:26:03 crc kubenswrapper[4813]: I0320 16:26:03.142132 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567066-c9q56" event={"ID":"85e8755f-951d-4604-8d18-b2c7c6d17d27","Type":"ContainerDied","Data":"c5cae7a847026d6443dc26b913ce20a2843559592d4de93755c16eca648542fd"} Mar 20 16:26:03 crc kubenswrapper[4813]: I0320 16:26:03.143785 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-2" event={"ID":"1023eeb8-1c11-4e31-bb09-30b6f83ad81e","Type":"ContainerStarted","Data":"44aca688d91881abc2648a74c67d104973ae944048082929e1a2f1fe59ccdc69"} Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.152706 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-2" 
event={"ID":"1023eeb8-1c11-4e31-bb09-30b6f83ad81e","Type":"ContainerStarted","Data":"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c"} Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.153047 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-2" event={"ID":"1023eeb8-1c11-4e31-bb09-30b6f83ad81e","Type":"ContainerStarted","Data":"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7"} Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.153378 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.180374 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/watcher-kuttl-api-2" podStartSLOduration=2.180356675 podStartE2EDuration="2.180356675s" podCreationTimestamp="2026-03-20 16:26:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 16:26:04.177476317 +0000 UTC m=+2893.600179158" watchObservedRunningTime="2026-03-20 16:26:04.180356675 +0000 UTC m=+2893.603059516" Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.674955 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.766106 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4z8s\" (UniqueName: \"kubernetes.io/projected/85e8755f-951d-4604-8d18-b2c7c6d17d27-kube-api-access-x4z8s\") pod \"85e8755f-951d-4604-8d18-b2c7c6d17d27\" (UID: \"85e8755f-951d-4604-8d18-b2c7c6d17d27\") " Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.772773 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85e8755f-951d-4604-8d18-b2c7c6d17d27-kube-api-access-x4z8s" (OuterVolumeSpecName: "kube-api-access-x4z8s") pod "85e8755f-951d-4604-8d18-b2c7c6d17d27" (UID: "85e8755f-951d-4604-8d18-b2c7c6d17d27"). InnerVolumeSpecName "kube-api-access-x4z8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:04 crc kubenswrapper[4813]: I0320 16:26:04.868258 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4z8s\" (UniqueName: \"kubernetes.io/projected/85e8755f-951d-4604-8d18-b2c7c6d17d27-kube-api-access-x4z8s\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:05 crc kubenswrapper[4813]: I0320 16:26:05.163100 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567066-c9q56" Mar 20 16:26:05 crc kubenswrapper[4813]: I0320 16:26:05.163119 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567066-c9q56" event={"ID":"85e8755f-951d-4604-8d18-b2c7c6d17d27","Type":"ContainerDied","Data":"59f8fd62e0545f7dc31e8d4304bed1b781fa99a75e7e797532783bc96c30ff4b"} Mar 20 16:26:05 crc kubenswrapper[4813]: I0320 16:26:05.164388 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59f8fd62e0545f7dc31e8d4304bed1b781fa99a75e7e797532783bc96c30ff4b" Mar 20 16:26:05 crc kubenswrapper[4813]: I0320 16:26:05.755894 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567060-66xlh"] Mar 20 16:26:05 crc kubenswrapper[4813]: I0320 16:26:05.762834 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567060-66xlh"] Mar 20 16:26:06 crc kubenswrapper[4813]: I0320 16:26:06.586800 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:07 crc kubenswrapper[4813]: I0320 16:26:07.273698 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="750e356e-89e8-43b9-9d7d-54cbefcb65bd" path="/var/lib/kubelet/pods/750e356e-89e8-43b9-9d7d-54cbefcb65bd/volumes" Mar 20 16:26:07 crc kubenswrapper[4813]: I0320 16:26:07.586016 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:12 crc kubenswrapper[4813]: I0320 16:26:12.585698 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:12 crc kubenswrapper[4813]: I0320 16:26:12.592263 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:13 crc kubenswrapper[4813]: I0320 16:26:13.238456 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:13 crc kubenswrapper[4813]: I0320 16:26:13.831610 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-2"] Mar 20 16:26:13 crc kubenswrapper[4813]: I0320 16:26:13.840852 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:26:13 crc kubenswrapper[4813]: I0320 16:26:13.841343 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-1" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-kuttl-api-log" containerID="cri-o://e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b" gracePeriod=30 Mar 20 16:26:13 crc kubenswrapper[4813]: I0320 16:26:13.841772 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-1" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-api" containerID="cri-o://ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6" gracePeriod=30 Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.247926 4813 generic.go:334] "Generic (PLEG): container finished" podID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerID="e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b" exitCode=143 Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.248026 4813 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e","Type":"ContainerDied","Data":"e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b"} Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.775229 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837087 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-cert-memcached-mtls\") pod \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837158 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdmg8\" (UniqueName: \"kubernetes.io/projected/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-kube-api-access-fdmg8\") pod \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837203 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-combined-ca-bundle\") pod \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837233 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-logs\") pod \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837256 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-custom-prometheus-ca\") pod \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837310 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-config-data\") pod \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\" (UID: \"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e\") " Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.837672 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-logs" (OuterVolumeSpecName: "logs") pod "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" (UID: "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.838258 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.842571 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-kube-api-access-fdmg8" (OuterVolumeSpecName: "kube-api-access-fdmg8") pod "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" (UID: "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e"). InnerVolumeSpecName "kube-api-access-fdmg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.861024 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" (UID: "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.863962 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" (UID: "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.882753 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-config-data" (OuterVolumeSpecName: "config-data") pod "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" (UID: "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.896890 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" (UID: "b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.939697 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.939726 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdmg8\" (UniqueName: \"kubernetes.io/projected/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-kube-api-access-fdmg8\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.939737 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.939747 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:14 crc kubenswrapper[4813]: I0320 16:26:14.939757 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.266167 4813 generic.go:334] "Generic (PLEG): container finished" podID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerID="ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6" exitCode=0 Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.266316 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-1" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.266547 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-2" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-kuttl-api-log" containerID="cri-o://cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7" gracePeriod=30 Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.266660 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-2" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-api" containerID="cri-o://bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c" gracePeriod=30 Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.300580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e","Type":"ContainerDied","Data":"ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6"} Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.300660 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-1" event={"ID":"b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e","Type":"ContainerDied","Data":"9b17a7d1c7ce02d08d60f9b998ab04ad615120b1a2c16a99d594c5b6636eb442"} Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.300700 4813 scope.go:117] "RemoveContainer" containerID="ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.344915 4813 scope.go:117] "RemoveContainer" containerID="e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.346874 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.356217 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-1"] Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.378519 4813 scope.go:117] "RemoveContainer" containerID="ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6" Mar 20 16:26:15 crc kubenswrapper[4813]: E0320 16:26:15.381477 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6\": container with ID starting with ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6 not found: ID does not exist" containerID="ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.381570 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6"} err="failed to get container status \"ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6\": rpc error: code = NotFound desc = could not find container \"ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6\": container with ID starting with ecd3c51ffc1e210f3797be55c314bf43d41129967da7bb75e784969bad8dcff6 not found: ID does not exist" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.381609 4813 scope.go:117] "RemoveContainer" containerID="e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b" Mar 20 
16:26:15 crc kubenswrapper[4813]: E0320 16:26:15.382027 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b\": container with ID starting with e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b not found: ID does not exist" containerID="e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b" Mar 20 16:26:15 crc kubenswrapper[4813]: I0320 16:26:15.382063 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b"} err="failed to get container status \"e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b\": rpc error: code = NotFound desc = could not find container \"e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b\": container with ID starting with e71d5cbd62b552fd8ff2e5e7aa6520440d6f6132005ac950a286846afbaba99b not found: ID does not exist" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.195714 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.260772 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-cert-memcached-mtls\") pod \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.261464 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-custom-prometheus-ca\") pod \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.261531 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-logs\") pod \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.261579 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-combined-ca-bundle\") pod \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.261709 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5zp9\" (UniqueName: \"kubernetes.io/projected/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-kube-api-access-w5zp9\") pod \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.261819 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-config-data\") pod \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\" (UID: \"1023eeb8-1c11-4e31-bb09-30b6f83ad81e\") " Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.262702 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-logs" (OuterVolumeSpecName: "logs") pod "1023eeb8-1c11-4e31-bb09-30b6f83ad81e" (UID: "1023eeb8-1c11-4e31-bb09-30b6f83ad81e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.267183 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-kube-api-access-w5zp9" (OuterVolumeSpecName: "kube-api-access-w5zp9") pod "1023eeb8-1c11-4e31-bb09-30b6f83ad81e" (UID: "1023eeb8-1c11-4e31-bb09-30b6f83ad81e"). InnerVolumeSpecName "kube-api-access-w5zp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278079 4813 generic.go:334] "Generic (PLEG): container finished" podID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerID="bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c" exitCode=0 Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278125 4813 generic.go:334] "Generic (PLEG): container finished" podID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerID="cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7" exitCode=143 Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278204 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-2" event={"ID":"1023eeb8-1c11-4e31-bb09-30b6f83ad81e","Type":"ContainerDied","Data":"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c"} Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-2" event={"ID":"1023eeb8-1c11-4e31-bb09-30b6f83ad81e","Type":"ContainerDied","Data":"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7"} Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278250 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-2" event={"ID":"1023eeb8-1c11-4e31-bb09-30b6f83ad81e","Type":"ContainerDied","Data":"44aca688d91881abc2648a74c67d104973ae944048082929e1a2f1fe59ccdc69"} Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278306 4813 scope.go:117] "RemoveContainer" containerID="bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.278325 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-2" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.297234 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "1023eeb8-1c11-4e31-bb09-30b6f83ad81e" (UID: "1023eeb8-1c11-4e31-bb09-30b6f83ad81e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.298993 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1023eeb8-1c11-4e31-bb09-30b6f83ad81e" (UID: "1023eeb8-1c11-4e31-bb09-30b6f83ad81e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.316633 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-config-data" (OuterVolumeSpecName: "config-data") pod "1023eeb8-1c11-4e31-bb09-30b6f83ad81e" (UID: "1023eeb8-1c11-4e31-bb09-30b6f83ad81e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.339821 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "1023eeb8-1c11-4e31-bb09-30b6f83ad81e" (UID: "1023eeb8-1c11-4e31-bb09-30b6f83ad81e"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.364275 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.364643 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.364655 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.364663 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.364672 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.364682 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5zp9\" (UniqueName: \"kubernetes.io/projected/1023eeb8-1c11-4e31-bb09-30b6f83ad81e-kube-api-access-w5zp9\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.392970 4813 scope.go:117] "RemoveContainer" containerID="cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.419090 4813 scope.go:117] "RemoveContainer" containerID="bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c" Mar 20 16:26:16 crc kubenswrapper[4813]: E0320 16:26:16.431656 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c\": container with ID starting with bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c not found: ID does not exist" containerID="bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.431701 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c"} err="failed to get container status \"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c\": rpc error: code = NotFound desc = could not find container \"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c\": container with ID starting with bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c not found: ID does not exist" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.431726 4813 scope.go:117] "RemoveContainer" containerID="cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7" Mar 20 16:26:16 crc kubenswrapper[4813]: E0320 16:26:16.432902 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7\": container with ID starting with cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7 not found: ID does not exist" containerID="cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.432947 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7"} err="failed to get container status \"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7\": rpc error: code = NotFound desc = could not find container \"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7\": container with ID starting with cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7 not found: ID does not exist" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.432973 4813 scope.go:117] "RemoveContainer" containerID="bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.433323 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c"} err="failed to get container status \"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c\": rpc error: code = NotFound desc = could not find container \"bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c\": container with ID starting with bba9c3e33263f38bb65c8b52f5727092c1c30f0ef37b0deb42584703399adb1c not found: ID does not exist" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.433370 4813 scope.go:117] "RemoveContainer" containerID="cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.434758 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7"} err="failed to get container status \"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7\": rpc error: code = NotFound desc = could not find container \"cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7\": container with ID starting with cc549ab7b176916aef71d01a1e2c949fff8d3c03a195a00ca786cd0130814db7 not found: ID does not exist" Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.610271 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-2"] Mar 20 16:26:16 crc kubenswrapper[4813]: I0320 16:26:16.627574 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["watcher-kuttl-default/watcher-kuttl-api-2"] Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.127968 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.128280 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-api" containerID="cri-o://6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5" gracePeriod=30 Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.128474 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-api-0" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-kuttl-api-log" containerID="cri-o://cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12" gracePeriod=30 Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.276879 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" path="/var/lib/kubelet/pods/1023eeb8-1c11-4e31-bb09-30b6f83ad81e/volumes" Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.277595 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" path="/var/lib/kubelet/pods/b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e/volumes" Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.290380 4813 generic.go:334] "Generic (PLEG): container finished" podID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerID="cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12" exitCode=143 Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.290422 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d","Type":"ContainerDied","Data":"cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12"} Mar 20 16:26:17 crc kubenswrapper[4813]: I0320 16:26:17.977015 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.092102 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvktp\" (UniqueName: \"kubernetes.io/projected/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-kube-api-access-hvktp\") pod \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.092974 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-config-data\") pod \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.093020 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-cert-memcached-mtls\") pod \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.093123 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-logs\") pod \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.093178 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-combined-ca-bundle\") pod \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.093225 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-custom-prometheus-ca\") pod \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\" (UID: \"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d\") " Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.093403 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-logs" (OuterVolumeSpecName: "logs") pod "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" (UID: "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.093654 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.096238 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-kube-api-access-hvktp" (OuterVolumeSpecName: "kube-api-access-hvktp") pod "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" (UID: "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d"). InnerVolumeSpecName "kube-api-access-hvktp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.123736 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" (UID: "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.129941 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" (UID: "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.152956 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-config-data" (OuterVolumeSpecName: "config-data") pod "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" (UID: "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.153115 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" (UID: "d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.196011 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvktp\" (UniqueName: \"kubernetes.io/projected/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-kube-api-access-hvktp\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.196041 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.196054 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.196062 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.196070 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.300742 4813 generic.go:334] "Generic (PLEG): container finished" podID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerID="6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5" exitCode=0 Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.300790 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d","Type":"ContainerDied","Data":"6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5"} Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.300851 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-api-0" event={"ID":"d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d","Type":"ContainerDied","Data":"77cb4e8c94f0c09dca2f4e83f917cca1cf753a535c16931ceab6deefeb7108bb"} Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.300875 4813 scope.go:117] "RemoveContainer" containerID="6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.301563 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-api-0" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.349914 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.350707 4813 scope.go:117] "RemoveContainer" containerID="cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.382633 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-db-sync-jfkc4"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.395560 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.396391 4813 scope.go:117] "RemoveContainer" containerID="6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.404848 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5\": container with ID starting with 6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5 not found: ID does not exist" containerID="6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.404901 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5"} err="failed to get container status \"6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5\": rpc error: code = NotFound desc = could not find container \"6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5\": container with ID starting with 6e64c3fcf6c18928cec3dd089d13d0e217916cdec6c3608566145f1763efe6c5 not found: ID does not exist" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.404931 4813 scope.go:117] "RemoveContainer" containerID="cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.405295 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12\": container with ID starting with cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12 not found: ID does not exist" containerID="cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.405320 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12"} err="failed to get container status \"cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12\": rpc error: code = NotFound desc = could not find container \"cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12\": container with ID starting with cb2730db59d5677f7690f4ee7c5dd6062159ce1e4830ff563094dd2a284cff12 not found: ID does not exist" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.408536 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-api-0"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.414625 4813 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["watcher-kuttl-default/watcherd736-account-delete-w4lxb"] Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.414969 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.414980 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.414989 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.414995 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.415009 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415015 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.415024 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415029 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.415051 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e8755f-951d-4604-8d18-b2c7c6d17d27" containerName="oc" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415056 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e8755f-951d-4604-8d18-b2c7c6d17d27" containerName="oc" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.415068 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415074 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: E0320 16:26:18.415097 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415103 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415240 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415253 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415261 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e8755f-951d-4604-8d18-b2c7c6d17d27" containerName="oc" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415270 4813 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415283 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415293 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1023eeb8-1c11-4e31-bb09-30b6f83ad81e" containerName="watcher-api" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415316 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b19a7ac1-9c40-43ea-b81c-b3ae0ad2e87e" containerName="watcher-kuttl-api-log" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.415852 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.423252 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcherd736-account-delete-w4lxb"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.434289 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.434537 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" podUID="7b7d20da-8572-4a49-ae8c-f36e8c123ff1" containerName="watcher-decision-engine" containerID="cri-o://af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1" gracePeriod=30 Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.454574 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.454791 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/watcher-kuttl-applier-0" podUID="7e44045b-211f-4bf2-b884-5248a0dc8fca" containerName="watcher-applier" containerID="cri-o://1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e" gracePeriod=30 Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.500550 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rz7n\" (UniqueName: \"kubernetes.io/projected/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-kube-api-access-5rz7n\") pod \"watcherd736-account-delete-w4lxb\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.500616 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-operator-scripts\") pod \"watcherd736-account-delete-w4lxb\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.602348 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rz7n\" (UniqueName: \"kubernetes.io/projected/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-kube-api-access-5rz7n\") pod \"watcherd736-account-delete-w4lxb\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.602401 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-operator-scripts\") pod \"watcherd736-account-delete-w4lxb\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.603464 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-operator-scripts\") pod \"watcherd736-account-delete-w4lxb\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.627300 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rz7n\" (UniqueName: \"kubernetes.io/projected/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-kube-api-access-5rz7n\") pod \"watcherd736-account-delete-w4lxb\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:18 crc kubenswrapper[4813]: I0320 16:26:18.744009 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.202604 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/watcherd736-account-delete-w4lxb"] Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.278701 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5379087a-bcdc-4d71-a12e-6152c1987c51" path="/var/lib/kubelet/pods/5379087a-bcdc-4d71-a12e-6152c1987c51/volumes" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.279372 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d" path="/var/lib/kubelet/pods/d88ca2e0-5917-4ce5-9fca-c8fdb1f1e76d/volumes" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.321873 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" event={"ID":"757caa1a-3ef4-4bc3-8e74-9a66f7987e74","Type":"ContainerStarted","Data":"0b92622c25ea39bc309322e011f9474e48d4dc2708ad4aad65dc13ae2b5fdef2"} Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.726136 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.778802 4813 scope.go:117] "RemoveContainer" containerID="3f2f7fa5fd73f0e223b1fe70cef7f1e483930a59da4e0f3b5952864956d4bc4b" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.796850 4813 scope.go:117] "RemoveContainer" containerID="342691f8df7c1247bae78fae0011f55b4bb8ea584b388238d2ac73a5066d86a8" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.823989 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-combined-ca-bundle\") pod \"7e44045b-211f-4bf2-b884-5248a0dc8fca\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.824076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-config-data\") pod \"7e44045b-211f-4bf2-b884-5248a0dc8fca\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.824138 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-cert-memcached-mtls\") pod \"7e44045b-211f-4bf2-b884-5248a0dc8fca\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.824165 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e44045b-211f-4bf2-b884-5248a0dc8fca-logs\") pod \"7e44045b-211f-4bf2-b884-5248a0dc8fca\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.824185 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m425k\" (UniqueName: \"kubernetes.io/projected/7e44045b-211f-4bf2-b884-5248a0dc8fca-kube-api-access-m425k\") pod \"7e44045b-211f-4bf2-b884-5248a0dc8fca\" (UID: \"7e44045b-211f-4bf2-b884-5248a0dc8fca\") " Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.824957 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e44045b-211f-4bf2-b884-5248a0dc8fca-logs" (OuterVolumeSpecName: "logs") pod "7e44045b-211f-4bf2-b884-5248a0dc8fca" (UID: "7e44045b-211f-4bf2-b884-5248a0dc8fca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.829832 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e44045b-211f-4bf2-b884-5248a0dc8fca-kube-api-access-m425k" (OuterVolumeSpecName: "kube-api-access-m425k") pod "7e44045b-211f-4bf2-b884-5248a0dc8fca" (UID: "7e44045b-211f-4bf2-b884-5248a0dc8fca"). InnerVolumeSpecName "kube-api-access-m425k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.839085 4813 scope.go:117] "RemoveContainer" containerID="219012ed31007297769945ff0ddb04225f14ab5da152e490631ec4ffa2cd82e0" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.878640 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e44045b-211f-4bf2-b884-5248a0dc8fca" (UID: "7e44045b-211f-4bf2-b884-5248a0dc8fca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.896694 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-config-data" (OuterVolumeSpecName: "config-data") pod "7e44045b-211f-4bf2-b884-5248a0dc8fca" (UID: "7e44045b-211f-4bf2-b884-5248a0dc8fca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.902659 4813 scope.go:117] "RemoveContainer" containerID="8db7dae88f5f923d7e7b661f9bccf90056fe91a660d79e798e61379884ea622f" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.910720 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "7e44045b-211f-4bf2-b884-5248a0dc8fca" (UID: "7e44045b-211f-4bf2-b884-5248a0dc8fca"). InnerVolumeSpecName "cert-memcached-mtls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.925561 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.925593 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.925602 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7e44045b-211f-4bf2-b884-5248a0dc8fca-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.925610 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e44045b-211f-4bf2-b884-5248a0dc8fca-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:19 crc kubenswrapper[4813]: I0320 16:26:19.925619 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m425k\" (UniqueName: \"kubernetes.io/projected/7e44045b-211f-4bf2-b884-5248a0dc8fca-kube-api-access-m425k\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.342661 4813 generic.go:334] "Generic (PLEG): container finished" podID="7e44045b-211f-4bf2-b884-5248a0dc8fca" containerID="1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e" exitCode=0 Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.343581 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-applier-0" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.343599 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"7e44045b-211f-4bf2-b884-5248a0dc8fca","Type":"ContainerDied","Data":"1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e"} Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.344471 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-applier-0" event={"ID":"7e44045b-211f-4bf2-b884-5248a0dc8fca","Type":"ContainerDied","Data":"d3eb9b58be44d8db23731b5afc1b13aa58914577714a78f0aea22233ae112c86"} Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.344518 4813 scope.go:117] "RemoveContainer" containerID="1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.347495 4813 generic.go:334] "Generic (PLEG): container finished" podID="757caa1a-3ef4-4bc3-8e74-9a66f7987e74" containerID="c308ecfe017e239b7cfa0ce0162057e65ba4b3592d72c1cfde47e8d034d68b33" exitCode=0 Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.347526 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" event={"ID":"757caa1a-3ef4-4bc3-8e74-9a66f7987e74","Type":"ContainerDied","Data":"c308ecfe017e239b7cfa0ce0162057e65ba4b3592d72c1cfde47e8d034d68b33"} Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.368168 4813 scope.go:117] "RemoveContainer" containerID="1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e" Mar 20 16:26:20 crc kubenswrapper[4813]: E0320 16:26:20.369071 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e\": container with ID starting with 1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e not found: ID does not exist" containerID="1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.369118 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e"} err="failed to get container status \"1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e\": rpc error: code = NotFound desc = could not find container \"1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e\": container with ID starting with 1e71ad7ac82a8b42025bb20504e52d29697a67f616985477ae540b254ed6649e not found: ID does not exist" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.382063 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.390739 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-applier-0"] Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.810220 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.938782 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-cert-memcached-mtls\") pod \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.939442 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-custom-prometheus-ca\") pod \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.939509 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-config-data\") pod \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.939537 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s8tf\" (UniqueName: \"kubernetes.io/projected/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-kube-api-access-6s8tf\") pod \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.939581 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-logs\") pod \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.939640 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-combined-ca-bundle\") pod \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\" (UID: \"7b7d20da-8572-4a49-ae8c-f36e8c123ff1\") " Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.939938 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-logs" (OuterVolumeSpecName: "logs") pod "7b7d20da-8572-4a49-ae8c-f36e8c123ff1" (UID: "7b7d20da-8572-4a49-ae8c-f36e8c123ff1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.940007 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-logs\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.952919 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-kube-api-access-6s8tf" (OuterVolumeSpecName: "kube-api-access-6s8tf") pod "7b7d20da-8572-4a49-ae8c-f36e8c123ff1" (UID: "7b7d20da-8572-4a49-ae8c-f36e8c123ff1"). InnerVolumeSpecName "kube-api-access-6s8tf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.973743 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "7b7d20da-8572-4a49-ae8c-f36e8c123ff1" (UID: "7b7d20da-8572-4a49-ae8c-f36e8c123ff1"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.975416 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b7d20da-8572-4a49-ae8c-f36e8c123ff1" (UID: "7b7d20da-8572-4a49-ae8c-f36e8c123ff1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:20 crc kubenswrapper[4813]: I0320 16:26:20.983699 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-config-data" (OuterVolumeSpecName: "config-data") pod "7b7d20da-8572-4a49-ae8c-f36e8c123ff1" (UID: "7b7d20da-8572-4a49-ae8c-f36e8c123ff1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.007598 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-cert-memcached-mtls" (OuterVolumeSpecName: "cert-memcached-mtls") pod "7b7d20da-8572-4a49-ae8c-f36e8c123ff1" (UID: "7b7d20da-8572-4a49-ae8c-f36e8c123ff1"). InnerVolumeSpecName "cert-memcached-mtls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.041979 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.042020 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s8tf\" (UniqueName: \"kubernetes.io/projected/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-kube-api-access-6s8tf\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.042035 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.042047 4813 reconciler_common.go:293] "Volume detached for volume \"cert-memcached-mtls\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-cert-memcached-mtls\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.042061 4813 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/7b7d20da-8572-4a49-ae8c-f36e8c123ff1-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.097026 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.097340 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-central-agent" containerID="cri-o://2fd34ec974044165a9ae243c74185fe8bd35f934f63628267414b3686497abda" gracePeriod=30 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.097465 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="proxy-httpd" containerID="cri-o://55df31da9d653748d3ca82005e099b1091998435d08e4129fbaefb1331e6d884" gracePeriod=30 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.097529 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="sg-core" containerID="cri-o://c7e1711bb480f87f00f87d0a924346efbfa04fd76ba9cf11958ce253e4e9eb0b" gracePeriod=30 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.097558 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-notification-agent" containerID="cri-o://be5b1cb7c2d168ba67b41f15b5efabfee5c762d9dc988bc81e38ab832780de7b" gracePeriod=30 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.108269 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.276840 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e44045b-211f-4bf2-b884-5248a0dc8fca" path="/var/lib/kubelet/pods/7e44045b-211f-4bf2-b884-5248a0dc8fca/volumes" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.361621 4813 generic.go:334] "Generic (PLEG): 
container finished" podID="7b7d20da-8572-4a49-ae8c-f36e8c123ff1" containerID="af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1" exitCode=0 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.361692 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.361705 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"7b7d20da-8572-4a49-ae8c-f36e8c123ff1","Type":"ContainerDied","Data":"af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1"} Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.361750 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcher-kuttl-decision-engine-0" event={"ID":"7b7d20da-8572-4a49-ae8c-f36e8c123ff1","Type":"ContainerDied","Data":"aa9dd1b011e664b47cedeabc9d3dd5f1e62778c140a276acb4b699db4d867352"} Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.361767 4813 scope.go:117] "RemoveContainer" containerID="af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.366617 4813 generic.go:334] "Generic (PLEG): container finished" podID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerID="55df31da9d653748d3ca82005e099b1091998435d08e4129fbaefb1331e6d884" exitCode=0 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.366642 4813 generic.go:334] "Generic (PLEG): container finished" podID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerID="c7e1711bb480f87f00f87d0a924346efbfa04fd76ba9cf11958ce253e4e9eb0b" exitCode=2 Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.366654 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerDied","Data":"55df31da9d653748d3ca82005e099b1091998435d08e4129fbaefb1331e6d884"} Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.366696 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerDied","Data":"c7e1711bb480f87f00f87d0a924346efbfa04fd76ba9cf11958ce253e4e9eb0b"} Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.383404 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.390034 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-kuttl-decision-engine-0"] Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.392269 4813 scope.go:117] "RemoveContainer" containerID="af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1" Mar 20 16:26:21 crc kubenswrapper[4813]: E0320 16:26:21.392867 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1\": container with ID starting with af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1 not found: ID does not exist" containerID="af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.392972 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1"} 
err="failed to get container status \"af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1\": rpc error: code = NotFound desc = could not find container \"af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1\": container with ID starting with af1228c08b8c0433229973360ea19933aa8b5e62443de0839a445267dba131a1 not found: ID does not exist" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.706616 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.756694 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-operator-scripts\") pod \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.756760 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rz7n\" (UniqueName: \"kubernetes.io/projected/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-kube-api-access-5rz7n\") pod \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\" (UID: \"757caa1a-3ef4-4bc3-8e74-9a66f7987e74\") " Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.757225 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "757caa1a-3ef4-4bc3-8e74-9a66f7987e74" (UID: "757caa1a-3ef4-4bc3-8e74-9a66f7987e74"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.760825 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-kube-api-access-5rz7n" (OuterVolumeSpecName: "kube-api-access-5rz7n") pod "757caa1a-3ef4-4bc3-8e74-9a66f7987e74" (UID: "757caa1a-3ef4-4bc3-8e74-9a66f7987e74"). InnerVolumeSpecName "kube-api-access-5rz7n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.858916 4813 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:21 crc kubenswrapper[4813]: I0320 16:26:21.859229 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rz7n\" (UniqueName: \"kubernetes.io/projected/757caa1a-3ef4-4bc3-8e74-9a66f7987e74-kube-api-access-5rz7n\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.378363 4813 generic.go:334] "Generic (PLEG): container finished" podID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerID="be5b1cb7c2d168ba67b41f15b5efabfee5c762d9dc988bc81e38ab832780de7b" exitCode=0 Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.378398 4813 generic.go:334] "Generic (PLEG): container finished" podID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerID="2fd34ec974044165a9ae243c74185fe8bd35f934f63628267414b3686497abda" exitCode=0 Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.378453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerDied","Data":"be5b1cb7c2d168ba67b41f15b5efabfee5c762d9dc988bc81e38ab832780de7b"} Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.378477 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerDied","Data":"2fd34ec974044165a9ae243c74185fe8bd35f934f63628267414b3686497abda"} Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.379964 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" event={"ID":"757caa1a-3ef4-4bc3-8e74-9a66f7987e74","Type":"ContainerDied","Data":"0b92622c25ea39bc309322e011f9474e48d4dc2708ad4aad65dc13ae2b5fdef2"} Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.379999 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b92622c25ea39bc309322e011f9474e48d4dc2708ad4aad65dc13ae2b5fdef2" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.380060 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/watcherd736-account-delete-w4lxb" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.775746 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.875290 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-scripts\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.875669 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-config-data\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.875695 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt6fr\" (UniqueName: \"kubernetes.io/projected/4306ca75-59b5-4d06-b978-b24e4d07cb47-kube-api-access-gt6fr\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876020 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-combined-ca-bundle\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876048 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-log-httpd\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876133 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-sg-core-conf-yaml\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876185 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-run-httpd\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876247 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-ceilometer-tls-certs\") pod \"4306ca75-59b5-4d06-b978-b24e4d07cb47\" (UID: \"4306ca75-59b5-4d06-b978-b24e4d07cb47\") " Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876558 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.876781 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.877008 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.881693 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-scripts" (OuterVolumeSpecName: "scripts") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.884633 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4306ca75-59b5-4d06-b978-b24e4d07cb47-kube-api-access-gt6fr" (OuterVolumeSpecName: "kube-api-access-gt6fr") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "kube-api-access-gt6fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.906745 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.944338 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.959382 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-config-data" (OuterVolumeSpecName: "config-data") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.960118 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4306ca75-59b5-4d06-b978-b24e4d07cb47" (UID: "4306ca75-59b5-4d06-b978-b24e4d07cb47"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978320 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4306ca75-59b5-4d06-b978-b24e4d07cb47-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978615 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978703 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978760 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978813 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt6fr\" (UniqueName: \"kubernetes.io/projected/4306ca75-59b5-4d06-b978-b24e4d07cb47-kube-api-access-gt6fr\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978875 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:22 crc kubenswrapper[4813]: I0320 16:26:22.978952 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4306ca75-59b5-4d06-b978-b24e4d07cb47-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.276530 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b7d20da-8572-4a49-ae8c-f36e8c123ff1" path="/var/lib/kubelet/pods/7b7d20da-8572-4a49-ae8c-f36e8c123ff1/volumes" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.391328 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"4306ca75-59b5-4d06-b978-b24e4d07cb47","Type":"ContainerDied","Data":"79607e46a5a9e761098868f84dfc21e47fffa19c80954276a448ca4827eb4e49"} Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.391369 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.391383 4813 scope.go:117] "RemoveContainer" containerID="55df31da9d653748d3ca82005e099b1091998435d08e4129fbaefb1331e6d884" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.418330 4813 scope.go:117] "RemoveContainer" containerID="c7e1711bb480f87f00f87d0a924346efbfa04fd76ba9cf11958ce253e4e9eb0b" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.419834 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.433192 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.448162 4813 scope.go:117] "RemoveContainer" containerID="be5b1cb7c2d168ba67b41f15b5efabfee5c762d9dc988bc81e38ab832780de7b" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.457283 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-db-create-c5g9t"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.472817 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473229 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="sg-core" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473249 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="sg-core" Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473263 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-central-agent" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473273 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-central-agent" Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473283 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="proxy-httpd" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473290 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="proxy-httpd" Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473298 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="757caa1a-3ef4-4bc3-8e74-9a66f7987e74" containerName="mariadb-account-delete" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473307 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="757caa1a-3ef4-4bc3-8e74-9a66f7987e74" containerName="mariadb-account-delete" Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473335 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-notification-agent" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473342 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-notification-agent" Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473363 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7d20da-8572-4a49-ae8c-f36e8c123ff1" containerName="watcher-decision-engine" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473371 4813 
state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7d20da-8572-4a49-ae8c-f36e8c123ff1" containerName="watcher-decision-engine" Mar 20 16:26:23 crc kubenswrapper[4813]: E0320 16:26:23.473386 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e44045b-211f-4bf2-b884-5248a0dc8fca" containerName="watcher-applier" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473393 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e44045b-211f-4bf2-b884-5248a0dc8fca" containerName="watcher-applier" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473593 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-central-agent" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473611 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7d20da-8572-4a49-ae8c-f36e8c123ff1" containerName="watcher-decision-engine" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473620 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="sg-core" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473630 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="proxy-httpd" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473647 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" containerName="ceilometer-notification-agent" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473662 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="757caa1a-3ef4-4bc3-8e74-9a66f7987e74" containerName="mariadb-account-delete" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.473670 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e44045b-211f-4bf2-b884-5248a0dc8fca" containerName="watcher-applier" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.475688 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.479014 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.483229 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.483229 4813 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.489948 4813 scope.go:117] "RemoveContainer" containerID="2fd34ec974044165a9ae243c74185fe8bd35f934f63628267414b3686497abda" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.490112 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-db-create-c5g9t"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.511990 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcherd736-account-delete-w4lxb"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.538547 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/watcher-d736-account-create-update-vq2rf"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.556293 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcherd736-account-delete-w4lxb"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.564780 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/watcher-d736-account-create-update-vq2rf"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.570760 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590579 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/112da3cf-6ba4-41d9-b885-88df5555e526-log-httpd\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590599 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-scripts\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590623 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwbls\" (UniqueName: \"kubernetes.io/projected/112da3cf-6ba4-41d9-b885-88df5555e526-kube-api-access-zwbls\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590799 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/112da3cf-6ba4-41d9-b885-88df5555e526-run-httpd\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-config-data\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.590963 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692699 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/112da3cf-6ba4-41d9-b885-88df5555e526-run-httpd\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692766 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-config-data\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692807 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692857 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692886 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/112da3cf-6ba4-41d9-b885-88df5555e526-log-httpd\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692903 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-scripts\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " 
pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692924 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwbls\" (UniqueName: \"kubernetes.io/projected/112da3cf-6ba4-41d9-b885-88df5555e526-kube-api-access-zwbls\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.692944 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.693249 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/112da3cf-6ba4-41d9-b885-88df5555e526-run-httpd\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.693941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/112da3cf-6ba4-41d9-b885-88df5555e526-log-httpd\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.696574 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.696845 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.697308 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.699953 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-scripts\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.706719 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112da3cf-6ba4-41d9-b885-88df5555e526-config-data\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.715214 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwbls\" (UniqueName: 
\"kubernetes.io/projected/112da3cf-6ba4-41d9-b885-88df5555e526-kube-api-access-zwbls\") pod \"ceilometer-0\" (UID: \"112da3cf-6ba4-41d9-b885-88df5555e526\") " pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:23 crc kubenswrapper[4813]: I0320 16:26:23.821073 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:24 crc kubenswrapper[4813]: I0320 16:26:24.332793 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Mar 20 16:26:24 crc kubenswrapper[4813]: I0320 16:26:24.401658 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"112da3cf-6ba4-41d9-b885-88df5555e526","Type":"ContainerStarted","Data":"b52949725797629e68a88a27f87e136199491ed9accf01bc5fa0798a0358babe"} Mar 20 16:26:25 crc kubenswrapper[4813]: I0320 16:26:25.275600 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4306ca75-59b5-4d06-b978-b24e4d07cb47" path="/var/lib/kubelet/pods/4306ca75-59b5-4d06-b978-b24e4d07cb47/volumes" Mar 20 16:26:25 crc kubenswrapper[4813]: I0320 16:26:25.276864 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="479b8658-bec4-4db2-9ac0-992e8bf35f0d" path="/var/lib/kubelet/pods/479b8658-bec4-4db2-9ac0-992e8bf35f0d/volumes" Mar 20 16:26:25 crc kubenswrapper[4813]: I0320 16:26:25.277464 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="757caa1a-3ef4-4bc3-8e74-9a66f7987e74" path="/var/lib/kubelet/pods/757caa1a-3ef4-4bc3-8e74-9a66f7987e74/volumes" Mar 20 16:26:25 crc kubenswrapper[4813]: I0320 16:26:25.278543 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf" path="/var/lib/kubelet/pods/e39d0c6b-a1d8-4098-b8c5-2519e7ca77cf/volumes" Mar 20 16:26:25 crc kubenswrapper[4813]: I0320 16:26:25.410594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"112da3cf-6ba4-41d9-b885-88df5555e526","Type":"ContainerStarted","Data":"ea7e3a806f41c4675da891467b36b9de392a60d609e7f7081be4d823cd5f8811"} Mar 20 16:26:26 crc kubenswrapper[4813]: I0320 16:26:26.420450 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"112da3cf-6ba4-41d9-b885-88df5555e526","Type":"ContainerStarted","Data":"23e9814577eb656a957eca4a7cbd9a6a00f18dd7de63588d2396e30df1a7ad58"} Mar 20 16:26:26 crc kubenswrapper[4813]: I0320 16:26:26.421110 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"112da3cf-6ba4-41d9-b885-88df5555e526","Type":"ContainerStarted","Data":"78770c0ee760c826b9a329c2a084ea88fd2bf6d4010ab79ab3274fb5a3f9de6b"} Mar 20 16:26:29 crc kubenswrapper[4813]: I0320 16:26:29.455309 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"112da3cf-6ba4-41d9-b885-88df5555e526","Type":"ContainerStarted","Data":"9e879ec151593fa01c5483300787991707013c77fc41980af59b7b615b892a71"} Mar 20 16:26:29 crc kubenswrapper[4813]: I0320 16:26:29.456637 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:29 crc kubenswrapper[4813]: I0320 16:26:29.486145 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.298521538 podStartE2EDuration="6.486123477s" podCreationTimestamp="2026-03-20 16:26:23 
+0000 UTC" firstStartedPulling="2026-03-20 16:26:24.349519684 +0000 UTC m=+2913.772222525" lastFinishedPulling="2026-03-20 16:26:28.537121623 +0000 UTC m=+2917.959824464" observedRunningTime="2026-03-20 16:26:29.479697335 +0000 UTC m=+2918.902400176" watchObservedRunningTime="2026-03-20 16:26:29.486123477 +0000 UTC m=+2918.908826318" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.012052 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c6zb2"] Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.014152 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.023340 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c6zb2"] Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.121250 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgvlc\" (UniqueName: \"kubernetes.io/projected/af426b6f-d574-4859-9285-dcd3f90b89ae-kube-api-access-qgvlc\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.121317 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-catalog-content\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.121509 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-utilities\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.222774 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-utilities\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.222867 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgvlc\" (UniqueName: \"kubernetes.io/projected/af426b6f-d574-4859-9285-dcd3f90b89ae-kube-api-access-qgvlc\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.222894 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-catalog-content\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.223334 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-utilities\") pod 
\"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.223350 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-catalog-content\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.245291 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgvlc\" (UniqueName: \"kubernetes.io/projected/af426b6f-d574-4859-9285-dcd3f90b89ae-kube-api-access-qgvlc\") pod \"community-operators-c6zb2\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.337367 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:37 crc kubenswrapper[4813]: I0320 16:26:37.946732 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c6zb2"] Mar 20 16:26:37 crc kubenswrapper[4813]: W0320 16:26:37.956247 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf426b6f_d574_4859_9285_dcd3f90b89ae.slice/crio-518390e979cc0d8823af128329e731ec21a82c31a638bced41bbb064acac39e2 WatchSource:0}: Error finding container 518390e979cc0d8823af128329e731ec21a82c31a638bced41bbb064acac39e2: Status 404 returned error can't find the container with id 518390e979cc0d8823af128329e731ec21a82c31a638bced41bbb064acac39e2 Mar 20 16:26:38 crc kubenswrapper[4813]: I0320 16:26:38.536527 4813 generic.go:334] "Generic (PLEG): container finished" podID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerID="64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557" exitCode=0 Mar 20 16:26:38 crc kubenswrapper[4813]: I0320 16:26:38.536598 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerDied","Data":"64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557"} Mar 20 16:26:38 crc kubenswrapper[4813]: I0320 16:26:38.536639 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerStarted","Data":"518390e979cc0d8823af128329e731ec21a82c31a638bced41bbb064acac39e2"} Mar 20 16:26:38 crc kubenswrapper[4813]: I0320 16:26:38.539032 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 16:26:39 crc kubenswrapper[4813]: I0320 16:26:39.549636 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerStarted","Data":"1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e"} Mar 20 16:26:40 crc kubenswrapper[4813]: I0320 16:26:40.564863 4813 generic.go:334] "Generic (PLEG): container finished" podID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerID="1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e" exitCode=0 Mar 20 16:26:40 crc kubenswrapper[4813]: I0320 16:26:40.564982 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerDied","Data":"1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e"} Mar 20 16:26:41 crc kubenswrapper[4813]: I0320 16:26:41.577919 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerStarted","Data":"2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873"} Mar 20 16:26:41 crc kubenswrapper[4813]: I0320 16:26:41.613963 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c6zb2" podStartSLOduration=3.779362003 podStartE2EDuration="5.613948158s" podCreationTimestamp="2026-03-20 16:26:36 +0000 UTC" firstStartedPulling="2026-03-20 16:26:38.538700633 +0000 UTC m=+2927.961403474" lastFinishedPulling="2026-03-20 16:26:40.373286788 +0000 UTC m=+2929.795989629" observedRunningTime="2026-03-20 16:26:41.607944327 +0000 UTC m=+2931.030647178" watchObservedRunningTime="2026-03-20 16:26:41.613948158 +0000 UTC m=+2931.036650999" Mar 20 16:26:47 crc kubenswrapper[4813]: I0320 16:26:47.338506 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:47 crc kubenswrapper[4813]: I0320 16:26:47.339194 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:47 crc kubenswrapper[4813]: I0320 16:26:47.391740 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:47 crc kubenswrapper[4813]: I0320 16:26:47.683400 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.162897 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9gf9c/must-gather-jf28n"] Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.164833 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.168600 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9gf9c"/"openshift-service-ca.crt" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.168663 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9gf9c"/"default-dockercfg-79g2w" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.170927 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9gf9c"/"kube-root-ca.crt" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.179964 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9gf9c/must-gather-jf28n"] Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.229945 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bjnl\" (UniqueName: \"kubernetes.io/projected/83b66982-c445-4603-9068-896e7c713661-kube-api-access-6bjnl\") pod \"must-gather-jf28n\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.230074 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/83b66982-c445-4603-9068-896e7c713661-must-gather-output\") pod \"must-gather-jf28n\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.331456 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/83b66982-c445-4603-9068-896e7c713661-must-gather-output\") pod \"must-gather-jf28n\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.331566 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bjnl\" (UniqueName: \"kubernetes.io/projected/83b66982-c445-4603-9068-896e7c713661-kube-api-access-6bjnl\") pod \"must-gather-jf28n\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.331888 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/83b66982-c445-4603-9068-896e7c713661-must-gather-output\") pod \"must-gather-jf28n\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.357450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bjnl\" (UniqueName: \"kubernetes.io/projected/83b66982-c445-4603-9068-896e7c713661-kube-api-access-6bjnl\") pod \"must-gather-jf28n\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.485633 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.952246 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9gf9c/must-gather-jf28n"] Mar 20 16:26:50 crc kubenswrapper[4813]: W0320 16:26:50.956294 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83b66982_c445_4603_9068_896e7c713661.slice/crio-cf215ea5404ea60fd2862ce7bf14be0cb53437cb98e7c84723f07f2d5ff8c782 WatchSource:0}: Error finding container cf215ea5404ea60fd2862ce7bf14be0cb53437cb98e7c84723f07f2d5ff8c782: Status 404 returned error can't find the container with id cf215ea5404ea60fd2862ce7bf14be0cb53437cb98e7c84723f07f2d5ff8c782 Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.995046 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c6zb2"] Mar 20 16:26:50 crc kubenswrapper[4813]: I0320 16:26:50.995390 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c6zb2" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="registry-server" containerID="cri-o://2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873" gracePeriod=2 Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.347126 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.451078 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-catalog-content\") pod \"af426b6f-d574-4859-9285-dcd3f90b89ae\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.451241 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-utilities\") pod \"af426b6f-d574-4859-9285-dcd3f90b89ae\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.451387 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgvlc\" (UniqueName: \"kubernetes.io/projected/af426b6f-d574-4859-9285-dcd3f90b89ae-kube-api-access-qgvlc\") pod \"af426b6f-d574-4859-9285-dcd3f90b89ae\" (UID: \"af426b6f-d574-4859-9285-dcd3f90b89ae\") " Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.452127 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-utilities" (OuterVolumeSpecName: "utilities") pod "af426b6f-d574-4859-9285-dcd3f90b89ae" (UID: "af426b6f-d574-4859-9285-dcd3f90b89ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.462763 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af426b6f-d574-4859-9285-dcd3f90b89ae-kube-api-access-qgvlc" (OuterVolumeSpecName: "kube-api-access-qgvlc") pod "af426b6f-d574-4859-9285-dcd3f90b89ae" (UID: "af426b6f-d574-4859-9285-dcd3f90b89ae"). InnerVolumeSpecName "kube-api-access-qgvlc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.509995 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af426b6f-d574-4859-9285-dcd3f90b89ae" (UID: "af426b6f-d574-4859-9285-dcd3f90b89ae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.552700 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgvlc\" (UniqueName: \"kubernetes.io/projected/af426b6f-d574-4859-9285-dcd3f90b89ae-kube-api-access-qgvlc\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.552733 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.552742 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af426b6f-d574-4859-9285-dcd3f90b89ae-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.688765 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9gf9c/must-gather-jf28n" event={"ID":"83b66982-c445-4603-9068-896e7c713661","Type":"ContainerStarted","Data":"cf215ea5404ea60fd2862ce7bf14be0cb53437cb98e7c84723f07f2d5ff8c782"} Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.693174 4813 generic.go:334] "Generic (PLEG): container finished" podID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerID="2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873" exitCode=0 Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.693212 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerDied","Data":"2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873"} Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.693245 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6zb2" event={"ID":"af426b6f-d574-4859-9285-dcd3f90b89ae","Type":"ContainerDied","Data":"518390e979cc0d8823af128329e731ec21a82c31a638bced41bbb064acac39e2"} Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.693254 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c6zb2" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.693271 4813 scope.go:117] "RemoveContainer" containerID="2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.715224 4813 scope.go:117] "RemoveContainer" containerID="1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.744619 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c6zb2"] Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.746284 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c6zb2"] Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.758696 4813 scope.go:117] "RemoveContainer" containerID="64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.797114 4813 scope.go:117] "RemoveContainer" containerID="2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873" Mar 20 16:26:51 crc kubenswrapper[4813]: E0320 16:26:51.797726 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873\": container with ID starting with 2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873 not found: ID does not exist" containerID="2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.797774 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873"} err="failed to get container status \"2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873\": rpc error: code = NotFound desc = could not find container \"2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873\": container with ID starting with 2c43f417fa83d2ea8aa7d2fa44b79e7ba1c887bd9329272653552e191427f873 not found: ID does not exist" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.797878 4813 scope.go:117] "RemoveContainer" containerID="1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e" Mar 20 16:26:51 crc kubenswrapper[4813]: E0320 16:26:51.798185 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e\": container with ID starting with 1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e not found: ID does not exist" containerID="1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.798218 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e"} err="failed to get container status \"1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e\": rpc error: code = NotFound desc = could not find container \"1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e\": container with ID starting with 1db5b7f5f05316cc61e1c1b8c052026c70b1bbb32feec2604ba36bbf71a9a59e not found: ID does not exist" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.798242 4813 scope.go:117] "RemoveContainer" 
containerID="64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557" Mar 20 16:26:51 crc kubenswrapper[4813]: E0320 16:26:51.799328 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557\": container with ID starting with 64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557 not found: ID does not exist" containerID="64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557" Mar 20 16:26:51 crc kubenswrapper[4813]: I0320 16:26:51.799377 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557"} err="failed to get container status \"64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557\": rpc error: code = NotFound desc = could not find container \"64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557\": container with ID starting with 64d9502d0ed8050c8663a1ca4a0c8c60fc230ea9ff6a83868f2219c57d06d557 not found: ID does not exist" Mar 20 16:26:53 crc kubenswrapper[4813]: I0320 16:26:53.288280 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" path="/var/lib/kubelet/pods/af426b6f-d574-4859-9285-dcd3f90b89ae/volumes" Mar 20 16:26:53 crc kubenswrapper[4813]: I0320 16:26:53.828155 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Mar 20 16:26:56 crc kubenswrapper[4813]: I0320 16:26:56.749129 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9gf9c/must-gather-jf28n" event={"ID":"83b66982-c445-4603-9068-896e7c713661","Type":"ContainerStarted","Data":"15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5"} Mar 20 16:26:56 crc kubenswrapper[4813]: I0320 16:26:56.749739 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9gf9c/must-gather-jf28n" event={"ID":"83b66982-c445-4603-9068-896e7c713661","Type":"ContainerStarted","Data":"9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c"} Mar 20 16:26:56 crc kubenswrapper[4813]: I0320 16:26:56.779429 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9gf9c/must-gather-jf28n" podStartSLOduration=2.226797219 podStartE2EDuration="6.779409884s" podCreationTimestamp="2026-03-20 16:26:50 +0000 UTC" firstStartedPulling="2026-03-20 16:26:50.959323218 +0000 UTC m=+2940.382026059" lastFinishedPulling="2026-03-20 16:26:55.511935883 +0000 UTC m=+2944.934638724" observedRunningTime="2026-03-20 16:26:56.774064041 +0000 UTC m=+2946.196766882" watchObservedRunningTime="2026-03-20 16:26:56.779409884 +0000 UTC m=+2946.202112725" Mar 20 16:27:03 crc kubenswrapper[4813]: I0320 16:27:03.842329 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:27:03 crc kubenswrapper[4813]: I0320 16:27:03.844122 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Mar 20 16:27:33 crc kubenswrapper[4813]: I0320 16:27:33.842376 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:27:33 crc kubenswrapper[4813]: I0320 16:27:33.842987 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.151172 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567068-plqw2"] Mar 20 16:28:00 crc kubenswrapper[4813]: E0320 16:28:00.152178 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="extract-content" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.152201 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="extract-content" Mar 20 16:28:00 crc kubenswrapper[4813]: E0320 16:28:00.152226 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="extract-utilities" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.152235 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="extract-utilities" Mar 20 16:28:00 crc kubenswrapper[4813]: E0320 16:28:00.152266 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="registry-server" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.152274 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="registry-server" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.152462 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="af426b6f-d574-4859-9285-dcd3f90b89ae" containerName="registry-server" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.153115 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.155667 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.155829 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.156853 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.158357 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567068-plqw2"] Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.331374 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcsbk\" (UniqueName: \"kubernetes.io/projected/4537163d-0b02-40b2-ad77-ae3ec4022504-kube-api-access-pcsbk\") pod \"auto-csr-approver-29567068-plqw2\" (UID: \"4537163d-0b02-40b2-ad77-ae3ec4022504\") " pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.433218 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcsbk\" (UniqueName: \"kubernetes.io/projected/4537163d-0b02-40b2-ad77-ae3ec4022504-kube-api-access-pcsbk\") pod \"auto-csr-approver-29567068-plqw2\" (UID: \"4537163d-0b02-40b2-ad77-ae3ec4022504\") " pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.454366 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcsbk\" (UniqueName: \"kubernetes.io/projected/4537163d-0b02-40b2-ad77-ae3ec4022504-kube-api-access-pcsbk\") pod \"auto-csr-approver-29567068-plqw2\" (UID: \"4537163d-0b02-40b2-ad77-ae3ec4022504\") " pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.471998 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:00 crc kubenswrapper[4813]: I0320 16:28:00.946315 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567068-plqw2"] Mar 20 16:28:01 crc kubenswrapper[4813]: I0320 16:28:01.664046 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567068-plqw2" event={"ID":"4537163d-0b02-40b2-ad77-ae3ec4022504","Type":"ContainerStarted","Data":"5ce43a1b816efd9aa76bb572e151a86acac89d0b0996f56c6fbc2d51743b9a79"} Mar 20 16:28:02 crc kubenswrapper[4813]: I0320 16:28:02.676357 4813 generic.go:334] "Generic (PLEG): container finished" podID="4537163d-0b02-40b2-ad77-ae3ec4022504" containerID="19815ea197757157f0a9184b054ad7dc1eaf28b005e2eaa469c746cf2b1343db" exitCode=0 Mar 20 16:28:02 crc kubenswrapper[4813]: I0320 16:28:02.676463 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567068-plqw2" event={"ID":"4537163d-0b02-40b2-ad77-ae3ec4022504","Type":"ContainerDied","Data":"19815ea197757157f0a9184b054ad7dc1eaf28b005e2eaa469c746cf2b1343db"} Mar 20 16:28:03 crc kubenswrapper[4813]: I0320 16:28:03.843505 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:28:03 crc kubenswrapper[4813]: I0320 16:28:03.843807 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:28:03 crc kubenswrapper[4813]: I0320 16:28:03.843856 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:28:03 crc kubenswrapper[4813]: I0320 16:28:03.844681 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8ae1b78d7363e1a932bd9007ead346614378cdac238d4e7161c7374b208edff8"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:28:03 crc kubenswrapper[4813]: I0320 16:28:03.844740 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://8ae1b78d7363e1a932bd9007ead346614378cdac238d4e7161c7374b208edff8" gracePeriod=600 Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.111976 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.313250 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcsbk\" (UniqueName: \"kubernetes.io/projected/4537163d-0b02-40b2-ad77-ae3ec4022504-kube-api-access-pcsbk\") pod \"4537163d-0b02-40b2-ad77-ae3ec4022504\" (UID: \"4537163d-0b02-40b2-ad77-ae3ec4022504\") " Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.319793 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4537163d-0b02-40b2-ad77-ae3ec4022504-kube-api-access-pcsbk" (OuterVolumeSpecName: "kube-api-access-pcsbk") pod "4537163d-0b02-40b2-ad77-ae3ec4022504" (UID: "4537163d-0b02-40b2-ad77-ae3ec4022504"). InnerVolumeSpecName "kube-api-access-pcsbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.414859 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcsbk\" (UniqueName: \"kubernetes.io/projected/4537163d-0b02-40b2-ad77-ae3ec4022504-kube-api-access-pcsbk\") on node \"crc\" DevicePath \"\"" Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.696187 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567068-plqw2" Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.696175 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567068-plqw2" event={"ID":"4537163d-0b02-40b2-ad77-ae3ec4022504","Type":"ContainerDied","Data":"5ce43a1b816efd9aa76bb572e151a86acac89d0b0996f56c6fbc2d51743b9a79"} Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.696554 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ce43a1b816efd9aa76bb572e151a86acac89d0b0996f56c6fbc2d51743b9a79" Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.698654 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerID="8ae1b78d7363e1a932bd9007ead346614378cdac238d4e7161c7374b208edff8" exitCode=0 Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.698713 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"8ae1b78d7363e1a932bd9007ead346614378cdac238d4e7161c7374b208edff8"} Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.698906 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerStarted","Data":"61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146"} Mar 20 16:28:04 crc kubenswrapper[4813]: I0320 16:28:04.698949 4813 scope.go:117] "RemoveContainer" containerID="7d6c55de2c337ac41972de75044565900e8fbb8bf926027fa780db3b37d44fd2" Mar 20 16:28:04 crc kubenswrapper[4813]: E0320 16:28:04.876611 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4537163d_0b02_40b2_ad77_ae3ec4022504.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4537163d_0b02_40b2_ad77_ae3ec4022504.slice/crio-5ce43a1b816efd9aa76bb572e151a86acac89d0b0996f56c6fbc2d51743b9a79\": RecentStats: unable to find data in memory cache]" Mar 20 16:28:05 crc kubenswrapper[4813]: I0320 16:28:05.182469 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567062-v4l5d"] Mar 20 16:28:05 crc kubenswrapper[4813]: I0320 16:28:05.190670 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567062-v4l5d"] Mar 20 16:28:05 crc kubenswrapper[4813]: I0320 16:28:05.275267 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec" path="/var/lib/kubelet/pods/bfe4a6ec-6522-49dc-a0fc-01e0e71d9aec/volumes" Mar 20 16:28:06 crc kubenswrapper[4813]: I0320 16:28:06.685470 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/util/0.log" Mar 20 16:28:06 crc kubenswrapper[4813]: I0320 16:28:06.881950 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/util/0.log" Mar 20 16:28:06 crc kubenswrapper[4813]: I0320 16:28:06.918606 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/pull/0.log" Mar 20 16:28:06 crc kubenswrapper[4813]: I0320 16:28:06.960191 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/pull/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.101910 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/pull/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.101929 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/util/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.113618 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6e5835fdb519dbfa6963d219a5ec71b0d73708b5df93d36b8aba1618batvcbb_5ad3b123-515a-46c7-be27-6bf55d5823d0/extract/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.506860 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/util/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.655186 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/pull/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.714135 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/util/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.737161 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/pull/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.955414 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/util/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.974957 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/extract/0.log" Mar 20 16:28:07 crc kubenswrapper[4813]: I0320 16:28:07.981826 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8def7df2b499ac483c55b958477ae70d30de43fc359ef64c96f9aee971khqb2_bfe1f060-6370-4060-8b43-4e6e95c1f018/pull/0.log" Mar 20 16:28:08 crc kubenswrapper[4813]: I0320 16:28:08.183221 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59bc569d95-2l2s5_7c7640f5-78e7-4dba-8900-26dc47eb640f/manager/0.log" Mar 20 16:28:08 crc kubenswrapper[4813]: I0320 16:28:08.475112 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-79df6bcc97-z8zt6_200b39b4-9995-48fc-a31f-1708526bd9d8/manager/0.log" Mar 20 16:28:08 crc kubenswrapper[4813]: I0320 16:28:08.482947 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-588d4d986b-lzq5s_6fdb989b-19f2-4bc2-97c6-2dfa1bb4cfbe/manager/0.log" Mar 20 16:28:08 crc kubenswrapper[4813]: I0320 16:28:08.753291 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-67dd5f86f5-mqm85_829ecd99-4a08-4965-ab30-fb30ab8e2ead/manager/0.log" Mar 20 16:28:08 crc kubenswrapper[4813]: I0320 16:28:08.998779 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-8464cc45fb-v8t7w_be5e97ef-9ad5-4663-bbf1-69573e2eedeb/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.190138 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d58dc466-q88gq_f0d2c31f-5223-4222-afa8-ba918af23dca/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.249613 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7b9c774f96-qzktk_bcb8373c-ae7c-4646-a90a-fe965f70c9bd/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.301948 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f787dddc9-r7v2l_7f918684-13af-4141-a414-9ac7b87e75d9/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.540147 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-55f864c847-6xnn6_94489fb6-8195-4820-b4bf-87122803836a/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.614299 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-768b96df4c-kvw9r_35420d07-3f39-47f7-bc13-d5fc95954674/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.730406 4813 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67ccfc9778-cff9z_a314b9f5-f2d9-445c-bf5d-a42dc479c21d/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.846179 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-767865f676-fstk4_aee070f5-c22b-4b69-b116-685316825aaa/manager/0.log" Mar 20 16:28:09 crc kubenswrapper[4813]: I0320 16:28:09.899777 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5d488d59fb-vj727_6c1e7d53-7b72-4c38-bca1-94db6fd742d2/manager/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.043898 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-5b9f45d989-rncs2_e36b96c5-7bfb-4657-b69b-7eaeeff3b477/manager/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.096831 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-89d64c458-ntg7z_7a422362-8c7e-4943-ad13-ca4089978ef9/manager/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.296466 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-46f5x_96e2a06f-6d79-4e77-af9f-a5bc8959477a/registry-server/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.640861 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-884679f54-xvk2s_3e11ecc5-9159-4144-a583-1b657f2349ea/manager/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.692728 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6f58c59cbb-66v95_94bef8a0-0916-46e0-9253-8130ef2367da/manager/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.769882 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5784578c99-gpxm6_1847c24e-09bc-44da-9343-5ae3f93c1dd1/manager/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.877048 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-jmwcp_d09f9f03-5f5c-4b17-b4b7-81ddc051aef3/operator/0.log" Mar 20 16:28:10 crc kubenswrapper[4813]: I0320 16:28:10.993538 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-c674c5965-nqn84_a1747eb3-27f1-4f93-85c4-d786b8730bc9/manager/0.log" Mar 20 16:28:11 crc kubenswrapper[4813]: I0320 16:28:11.330226 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5c5cb9c4d7-5rxh5_2ce265a6-3022-420a-99dc-0cead55c568f/manager/0.log" Mar 20 16:28:11 crc kubenswrapper[4813]: I0320 16:28:11.369338 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-d6b694c5-2xj66_31acb64e-576e-4b51-a8a3-37162d9161c0/manager/0.log" Mar 20 16:28:11 crc kubenswrapper[4813]: I0320 16:28:11.665537 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-index-5c49m_a75a77fe-87c9-4704-8698-f6b526264fea/registry-server/0.log" Mar 20 16:28:11 crc kubenswrapper[4813]: I0320 16:28:11.869469 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-77c9f8cb5b-z626z_19a528ae-3ae2-48a5-88cb-04a92b073043/manager/0.log" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.226969 4813 scope.go:117] "RemoveContainer" containerID="c8201c24790ff7ecc798a8584d9459c973b40619fda3cb355f7141e0dcbf3ddd" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.286365 4813 scope.go:117] "RemoveContainer" containerID="4e5aaf8a3ff11a246a2864b20214d0fdb566ee007b31116e096ff536cfb8c72a" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.315133 4813 scope.go:117] "RemoveContainer" containerID="5eaa6587465e7c4640c9411426d0a266673fa422dc272fed1355ae3d3104bcd8" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.363919 4813 scope.go:117] "RemoveContainer" containerID="6e78257eb6b5aa83d204d8551cb15c21ca778b5facf51ba09ebc2dfb5bc40eab" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.384020 4813 scope.go:117] "RemoveContainer" containerID="d04e584f99e8e99b355c27e950835cfefed16595430064e9cb9d277fc31aa0e5" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.413593 4813 scope.go:117] "RemoveContainer" containerID="5cb3b5aa8eaa561c164cfc9b67551c982dc1ab06a2ccc54e95503c98ed645824" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.458754 4813 scope.go:117] "RemoveContainer" containerID="54adb3f5edab8b279a4a0256705cf63f186f5e730a39ddff7fc36897da581103" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.482849 4813 scope.go:117] "RemoveContainer" containerID="f3ed76ae2142273e540efabdcb5753d4a8a48626a75679e34c074d4a31d58726" Mar 20 16:28:20 crc kubenswrapper[4813]: I0320 16:28:20.499936 4813 scope.go:117] "RemoveContainer" containerID="aea28451ed8059a135210fea4ab34e3fc4bcf158fd3542818a36ed0ee0c6d13a" Mar 20 16:28:33 crc kubenswrapper[4813]: I0320 16:28:33.699978 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-lzrhk_1dce783b-8d1e-4171-8409-be2d773e2ab0/control-plane-machine-set-operator/0.log" Mar 20 16:28:33 crc kubenswrapper[4813]: I0320 16:28:33.924193 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hszjg_ef59e32b-dd5c-4beb-b348-67c4847e80ce/machine-api-operator/0.log" Mar 20 16:28:33 crc kubenswrapper[4813]: I0320 16:28:33.976712 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hszjg_ef59e32b-dd5c-4beb-b348-67c4847e80ce/kube-rbac-proxy/0.log" Mar 20 16:28:48 crc kubenswrapper[4813]: I0320 16:28:48.013178 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-545d4d4674-7rt5m_1de6b68c-a264-4e69-8f1d-5427473d0b31/cert-manager-controller/0.log" Mar 20 16:28:48 crc kubenswrapper[4813]: I0320 16:28:48.249192 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-5545bd876-st7jn_3abcfa4b-b69a-41ea-83e2-589697723bef/cert-manager-cainjector/0.log" Mar 20 16:28:48 crc kubenswrapper[4813]: I0320 16:28:48.255884 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-6888856db4-n56d9_d9ff7c1b-30f8-49bf-b626-3911d0943736/cert-manager-webhook/0.log" Mar 20 16:29:03 crc kubenswrapper[4813]: I0320 16:29:03.327970 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-86f58fcf4-7shs2_56e21629-308a-4212-ac84-81570cb43089/nmstate-console-plugin/0.log" Mar 20 16:29:03 crc 
kubenswrapper[4813]: I0320 16:29:03.768098 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-52hfr_b8bbc7f2-8378-46a1-9cc1-20c8366b22f4/nmstate-handler/0.log" Mar 20 16:29:03 crc kubenswrapper[4813]: I0320 16:29:03.807589 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-9b8c8685d-xfs5s_474448b5-47ff-4f1b-a127-f3192ec0a283/nmstate-metrics/0.log" Mar 20 16:29:03 crc kubenswrapper[4813]: I0320 16:29:03.838164 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-9b8c8685d-xfs5s_474448b5-47ff-4f1b-a127-f3192ec0a283/kube-rbac-proxy/0.log" Mar 20 16:29:03 crc kubenswrapper[4813]: I0320 16:29:03.998541 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-796d4cfff4-s52xz_6fd819ab-3d0b-427b-b29c-bf57d746a6a3/nmstate-operator/0.log" Mar 20 16:29:04 crc kubenswrapper[4813]: I0320 16:29:04.017358 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f558f5558-xbp9m_980f1609-b4ad-464a-93cd-895066f63c92/nmstate-webhook/0.log" Mar 20 16:29:19 crc kubenswrapper[4813]: I0320 16:29:19.380953 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-8ff7d675-zbjnw_13a0d7fe-7887-4ea4-ae5b-b47d8689373b/prometheus-operator/0.log" Mar 20 16:29:19 crc kubenswrapper[4813]: I0320 16:29:19.776186 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_fb3c3550-7cd9-4286-8ab3-e554c159b357/prometheus-operator-admission-webhook/0.log" Mar 20 16:29:19 crc kubenswrapper[4813]: I0320 16:29:19.836788 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_4ac554a5-8e12-415d-845c-72c909a7d1d2/prometheus-operator-admission-webhook/0.log" Mar 20 16:29:19 crc kubenswrapper[4813]: I0320 16:29:19.972895 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-6dd7dd855f-bvk5s_9cbfb8c9-4eb6-4382-b8b1-572029001cc0/operator/0.log" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.084615 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-ui-dashboards-7f87b9b85b-6t9mn_6c22deaf-5054-462f-bc80-f21fedd71c41/observability-ui-dashboards/0.log" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.182466 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-b9bc87685-dshn8_95615498-4862-4d52-9c91-6f2f4f17d4ba/perses-operator/0.log" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.663513 4813 scope.go:117] "RemoveContainer" containerID="2ad6eeaa655d9384ff0a27290dae6ae1dc1d68b5c8bf9588b0e8a2898258b885" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.691993 4813 scope.go:117] "RemoveContainer" containerID="f497e899a329e1a5c772467bdabd0280e11ec6ce83f73b3fd923e833da7fb878" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.720859 4813 scope.go:117] "RemoveContainer" containerID="2572dd65a89aca93eb1ecec3947667d411cca61d300b1942b0d0d8ccc966f394" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.771385 4813 scope.go:117] "RemoveContainer" containerID="0b1d34bed68049152622c4bfcb1ab980ca1bf99511f3d054ac4ed905e6643db3" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.790949 4813 scope.go:117] "RemoveContainer" 
containerID="4ce0f2637c7fb74b111fe0fc69ee6d91846b8fcd129a98dc40b76a1f83655f4c" Mar 20 16:29:20 crc kubenswrapper[4813]: I0320 16:29:20.839538 4813 scope.go:117] "RemoveContainer" containerID="bdcf09e85c4ef150f45b2a4da56f2f230e560fc2426d35e5a7a5579772bfc071" Mar 20 16:29:36 crc kubenswrapper[4813]: I0320 16:29:36.430232 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-7bb4cc7c98-f45rt_c9ca7072-6004-41c6-8090-d5bd3369994b/kube-rbac-proxy/0.log" Mar 20 16:29:36 crc kubenswrapper[4813]: I0320 16:29:36.435302 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-7bb4cc7c98-f45rt_c9ca7072-6004-41c6-8090-d5bd3369994b/controller/0.log" Mar 20 16:29:36 crc kubenswrapper[4813]: I0320 16:29:36.637323 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-frr-files/0.log" Mar 20 16:29:36 crc kubenswrapper[4813]: I0320 16:29:36.994764 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-reloader/0.log" Mar 20 16:29:36 crc kubenswrapper[4813]: I0320 16:29:36.994798 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-frr-files/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.021619 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-metrics/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.067246 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-reloader/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.299907 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-frr-files/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.332382 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-metrics/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.332850 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-reloader/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.335953 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-metrics/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.497202 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-frr-files/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.556353 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-reloader/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.572640 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/controller/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.581931 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/cp-metrics/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.877563 4813 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/kube-rbac-proxy-frr/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.890264 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/frr-metrics/0.log" Mar 20 16:29:37 crc kubenswrapper[4813]: I0320 16:29:37.890811 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/kube-rbac-proxy/0.log" Mar 20 16:29:38 crc kubenswrapper[4813]: I0320 16:29:38.155293 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/reloader/0.log" Mar 20 16:29:38 crc kubenswrapper[4813]: I0320 16:29:38.219841 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-bcc4b6f68-fxssk_5d96ab75-0ca7-47bc-b85d-b39649c76561/frr-k8s-webhook-server/0.log" Mar 20 16:29:38 crc kubenswrapper[4813]: I0320 16:29:38.476184 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-bfc448998-db8w6_28b6ff98-fd32-44bd-8ab0-9395fa76e6f8/manager/0.log" Mar 20 16:29:38 crc kubenswrapper[4813]: I0320 16:29:38.759493 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-795784bfcc-vcd2s_5673dcee-fdfe-4f75-9dca-b35a64c13bea/webhook-server/0.log" Mar 20 16:29:38 crc kubenswrapper[4813]: I0320 16:29:38.854364 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-skmm6_b9924c18-4463-4445-a18f-5bd9e6ec1334/kube-rbac-proxy/0.log" Mar 20 16:29:39 crc kubenswrapper[4813]: I0320 16:29:39.220547 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-fbdp4_090f54f0-8cbc-4ecf-b792-c160a26595ff/frr/0.log" Mar 20 16:29:39 crc kubenswrapper[4813]: I0320 16:29:39.251580 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-skmm6_b9924c18-4463-4445-a18f-5bd9e6ec1334/speaker/0.log" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.145475 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567070-h4fjk"] Mar 20 16:30:00 crc kubenswrapper[4813]: E0320 16:30:00.146465 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4537163d-0b02-40b2-ad77-ae3ec4022504" containerName="oc" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.146498 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4537163d-0b02-40b2-ad77-ae3ec4022504" containerName="oc" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.146702 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4537163d-0b02-40b2-ad77-ae3ec4022504" containerName="oc" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.147382 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.155565 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx"] Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.156748 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.157497 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.157477 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.157789 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.168403 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567070-h4fjk"] Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.168438 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.168579 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.179158 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx"] Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.329795 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7964d290-dc58-43ae-bbf5-461ce864849c-config-volume\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.329940 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7964d290-dc58-43ae-bbf5-461ce864849c-secret-volume\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.329964 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcmp6\" (UniqueName: \"kubernetes.io/projected/7964d290-dc58-43ae-bbf5-461ce864849c-kube-api-access-wcmp6\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.330013 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b67w\" (UniqueName: \"kubernetes.io/projected/10c98d88-c20b-4d75-b340-5ca0096c9a0f-kube-api-access-5b67w\") pod \"auto-csr-approver-29567070-h4fjk\" (UID: \"10c98d88-c20b-4d75-b340-5ca0096c9a0f\") " pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.431318 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7964d290-dc58-43ae-bbf5-461ce864849c-config-volume\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.431429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7964d290-dc58-43ae-bbf5-461ce864849c-secret-volume\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.431450 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcmp6\" (UniqueName: \"kubernetes.io/projected/7964d290-dc58-43ae-bbf5-461ce864849c-kube-api-access-wcmp6\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.431473 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b67w\" (UniqueName: \"kubernetes.io/projected/10c98d88-c20b-4d75-b340-5ca0096c9a0f-kube-api-access-5b67w\") pod \"auto-csr-approver-29567070-h4fjk\" (UID: \"10c98d88-c20b-4d75-b340-5ca0096c9a0f\") " pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.432466 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7964d290-dc58-43ae-bbf5-461ce864849c-config-volume\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.454203 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7964d290-dc58-43ae-bbf5-461ce864849c-secret-volume\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.457412 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b67w\" (UniqueName: \"kubernetes.io/projected/10c98d88-c20b-4d75-b340-5ca0096c9a0f-kube-api-access-5b67w\") pod \"auto-csr-approver-29567070-h4fjk\" (UID: \"10c98d88-c20b-4d75-b340-5ca0096c9a0f\") " pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.461072 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcmp6\" (UniqueName: \"kubernetes.io/projected/7964d290-dc58-43ae-bbf5-461ce864849c-kube-api-access-wcmp6\") pod \"collect-profiles-29567070-lbqxx\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.481230 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.495845 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.948031 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567070-h4fjk"] Mar 20 16:30:00 crc kubenswrapper[4813]: W0320 16:30:00.996610 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7964d290_dc58_43ae_bbf5_461ce864849c.slice/crio-d2806f637b8abc061f1273a1bb0400101a0b51486a5a642bae07c7bc7a76cc28 WatchSource:0}: Error finding container d2806f637b8abc061f1273a1bb0400101a0b51486a5a642bae07c7bc7a76cc28: Status 404 returned error can't find the container with id d2806f637b8abc061f1273a1bb0400101a0b51486a5a642bae07c7bc7a76cc28 Mar 20 16:30:00 crc kubenswrapper[4813]: I0320 16:30:00.999301 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx"] Mar 20 16:30:01 crc kubenswrapper[4813]: I0320 16:30:01.661654 4813 generic.go:334] "Generic (PLEG): container finished" podID="7964d290-dc58-43ae-bbf5-461ce864849c" containerID="f0a2ca5ddb00759218efd714901286a0348c652443b136f08c65012eca74ba0c" exitCode=0 Mar 20 16:30:01 crc kubenswrapper[4813]: I0320 16:30:01.661711 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" event={"ID":"7964d290-dc58-43ae-bbf5-461ce864849c","Type":"ContainerDied","Data":"f0a2ca5ddb00759218efd714901286a0348c652443b136f08c65012eca74ba0c"} Mar 20 16:30:01 crc kubenswrapper[4813]: I0320 16:30:01.661926 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" event={"ID":"7964d290-dc58-43ae-bbf5-461ce864849c","Type":"ContainerStarted","Data":"d2806f637b8abc061f1273a1bb0400101a0b51486a5a642bae07c7bc7a76cc28"} Mar 20 16:30:01 crc kubenswrapper[4813]: I0320 16:30:01.663235 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" event={"ID":"10c98d88-c20b-4d75-b340-5ca0096c9a0f","Type":"ContainerStarted","Data":"d6598fe2f1cc3d69c0cf0fa1db9fecde320885d36b2a1b35da27d54a07bf7455"} Mar 20 16:30:02 crc kubenswrapper[4813]: I0320 16:30:02.066887 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-z6ggq"] Mar 20 16:30:02 crc kubenswrapper[4813]: I0320 16:30:02.082005 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-z6ggq"] Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.009139 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.183967 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7964d290-dc58-43ae-bbf5-461ce864849c-secret-volume\") pod \"7964d290-dc58-43ae-bbf5-461ce864849c\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.184515 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcmp6\" (UniqueName: \"kubernetes.io/projected/7964d290-dc58-43ae-bbf5-461ce864849c-kube-api-access-wcmp6\") pod \"7964d290-dc58-43ae-bbf5-461ce864849c\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.184553 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7964d290-dc58-43ae-bbf5-461ce864849c-config-volume\") pod \"7964d290-dc58-43ae-bbf5-461ce864849c\" (UID: \"7964d290-dc58-43ae-bbf5-461ce864849c\") " Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.185418 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7964d290-dc58-43ae-bbf5-461ce864849c-config-volume" (OuterVolumeSpecName: "config-volume") pod "7964d290-dc58-43ae-bbf5-461ce864849c" (UID: "7964d290-dc58-43ae-bbf5-461ce864849c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.191123 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7964d290-dc58-43ae-bbf5-461ce864849c-kube-api-access-wcmp6" (OuterVolumeSpecName: "kube-api-access-wcmp6") pod "7964d290-dc58-43ae-bbf5-461ce864849c" (UID: "7964d290-dc58-43ae-bbf5-461ce864849c"). InnerVolumeSpecName "kube-api-access-wcmp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.191215 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7964d290-dc58-43ae-bbf5-461ce864849c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7964d290-dc58-43ae-bbf5-461ce864849c" (UID: "7964d290-dc58-43ae-bbf5-461ce864849c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.275650 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10bd89d7-817c-468e-bf64-fe24b73ea2a2" path="/var/lib/kubelet/pods/10bd89d7-817c-468e-bf64-fe24b73ea2a2/volumes" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.285506 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7964d290-dc58-43ae-bbf5-461ce864849c-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.285533 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7964d290-dc58-43ae-bbf5-461ce864849c-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.285544 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcmp6\" (UniqueName: \"kubernetes.io/projected/7964d290-dc58-43ae-bbf5-461ce864849c-kube-api-access-wcmp6\") on node \"crc\" DevicePath \"\"" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.681493 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" event={"ID":"7964d290-dc58-43ae-bbf5-461ce864849c","Type":"ContainerDied","Data":"d2806f637b8abc061f1273a1bb0400101a0b51486a5a642bae07c7bc7a76cc28"} Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.681526 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2806f637b8abc061f1273a1bb0400101a0b51486a5a642bae07c7bc7a76cc28" Mar 20 16:30:03 crc kubenswrapper[4813]: I0320 16:30:03.681556 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29567070-lbqxx" Mar 20 16:30:04 crc kubenswrapper[4813]: I0320 16:30:04.119102 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq"] Mar 20 16:30:04 crc kubenswrapper[4813]: I0320 16:30:04.131508 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29567025-flhmq"] Mar 20 16:30:04 crc kubenswrapper[4813]: I0320 16:30:04.689791 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" event={"ID":"10c98d88-c20b-4d75-b340-5ca0096c9a0f","Type":"ContainerStarted","Data":"5050f20a0aef5ab08890cba57b8184a8887f4a3a4ba501e266a49ca322da012f"} Mar 20 16:30:04 crc kubenswrapper[4813]: I0320 16:30:04.702323 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" podStartSLOduration=1.543913152 podStartE2EDuration="4.702302591s" podCreationTimestamp="2026-03-20 16:30:00 +0000 UTC" firstStartedPulling="2026-03-20 16:30:00.947953662 +0000 UTC m=+3130.370656493" lastFinishedPulling="2026-03-20 16:30:04.106343091 +0000 UTC m=+3133.529045932" observedRunningTime="2026-03-20 16:30:04.700757 +0000 UTC m=+3134.123459841" watchObservedRunningTime="2026-03-20 16:30:04.702302591 +0000 UTC m=+3134.125005462" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.038506 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_6dcc5c20-eb16-44c2-be60-17a397527235/init-config-reloader/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.190029 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_6dcc5c20-eb16-44c2-be60-17a397527235/init-config-reloader/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.254863 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_6dcc5c20-eb16-44c2-be60-17a397527235/alertmanager/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.265473 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_6dcc5c20-eb16-44c2-be60-17a397527235/config-reloader/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.280025 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81d21365-a70e-4490-a592-ee8f126e1e61" path="/var/lib/kubelet/pods/81d21365-a70e-4490-a592-ee8f126e1e61/volumes" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.444394 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_ceilometer-0_112da3cf-6ba4-41d9-b885-88df5555e526/ceilometer-central-agent/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.472547 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_ceilometer-0_112da3cf-6ba4-41d9-b885-88df5555e526/ceilometer-notification-agent/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.474328 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_ceilometer-0_112da3cf-6ba4-41d9-b885-88df5555e526/proxy-httpd/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.477929 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_ceilometer-0_112da3cf-6ba4-41d9-b885-88df5555e526/sg-core/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.696824 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-cron-29567041-94xtk_d3a49c21-5df2-4013-b93e-8314741903e6/keystone-cron/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.697045 4813 generic.go:334] "Generic (PLEG): container finished" podID="10c98d88-c20b-4d75-b340-5ca0096c9a0f" containerID="5050f20a0aef5ab08890cba57b8184a8887f4a3a4ba501e266a49ca322da012f" exitCode=0 Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.697082 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" event={"ID":"10c98d88-c20b-4d75-b340-5ca0096c9a0f","Type":"ContainerDied","Data":"5050f20a0aef5ab08890cba57b8184a8887f4a3a4ba501e266a49ca322da012f"} Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.719979 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-5d69f9588b-zsrh7_488cca2e-7391-4ebb-9f9d-e6d5f04e94c4/keystone-api/0.log" Mar 20 16:30:05 crc kubenswrapper[4813]: I0320 16:30:05.961710 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_kube-state-metrics-0_c5449776-80e0-4686-bd0b-2fd27d88ed52/kube-state-metrics/0.log" Mar 20 16:30:06 crc kubenswrapper[4813]: I0320 16:30:06.385422 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstack-galera-0_3367f79a-2519-4c2d-8563-fac811678ed3/mysql-bootstrap/0.log" Mar 20 16:30:06 crc kubenswrapper[4813]: I0320 16:30:06.571733 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstack-galera-0_3367f79a-2519-4c2d-8563-fac811678ed3/mysql-bootstrap/0.log" Mar 20 16:30:06 crc kubenswrapper[4813]: I0320 16:30:06.643364 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstack-galera-0_3367f79a-2519-4c2d-8563-fac811678ed3/galera/0.log" Mar 20 16:30:06 crc kubenswrapper[4813]: I0320 16:30:06.890806 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstackclient_8cf25919-005b-4b34-98eb-28cacad075b4/openstackclient/0.log" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.105340 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_d2f088cf-8304-46b5-af82-96110c742638/init-config-reloader/0.log" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.125683 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.241637 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b67w\" (UniqueName: \"kubernetes.io/projected/10c98d88-c20b-4d75-b340-5ca0096c9a0f-kube-api-access-5b67w\") pod \"10c98d88-c20b-4d75-b340-5ca0096c9a0f\" (UID: \"10c98d88-c20b-4d75-b340-5ca0096c9a0f\") " Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.247422 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10c98d88-c20b-4d75-b340-5ca0096c9a0f-kube-api-access-5b67w" (OuterVolumeSpecName: "kube-api-access-5b67w") pod "10c98d88-c20b-4d75-b340-5ca0096c9a0f" (UID: "10c98d88-c20b-4d75-b340-5ca0096c9a0f"). InnerVolumeSpecName "kube-api-access-5b67w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.359178 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b67w\" (UniqueName: \"kubernetes.io/projected/10c98d88-c20b-4d75-b340-5ca0096c9a0f-kube-api-access-5b67w\") on node \"crc\" DevicePath \"\"" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.364668 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_d2f088cf-8304-46b5-af82-96110c742638/config-reloader/0.log" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.414577 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_d2f088cf-8304-46b5-af82-96110c742638/init-config-reloader/0.log" Mar 20 16:30:07 crc kubenswrapper[4813]: E0320 16:30:07.444013 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10c98d88_c20b_4d75_b340_5ca0096c9a0f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10c98d88_c20b_4d75_b340_5ca0096c9a0f.slice/crio-d6598fe2f1cc3d69c0cf0fa1db9fecde320885d36b2a1b35da27d54a07bf7455\": RecentStats: unable to find data in memory cache]" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.490959 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_d2f088cf-8304-46b5-af82-96110c742638/prometheus/0.log" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.714787 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" event={"ID":"10c98d88-c20b-4d75-b340-5ca0096c9a0f","Type":"ContainerDied","Data":"d6598fe2f1cc3d69c0cf0fa1db9fecde320885d36b2a1b35da27d54a07bf7455"} Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.714832 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6598fe2f1cc3d69c0cf0fa1db9fecde320885d36b2a1b35da27d54a07bf7455" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.714891 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567070-h4fjk" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.772392 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567064-nhjmb"] Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.777749 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567064-nhjmb"] Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.818032 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_d2f088cf-8304-46b5-af82-96110c742638/thanos-sidecar/0.log" Mar 20 16:30:07 crc kubenswrapper[4813]: I0320 16:30:07.905198 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-notifications-server-0_0f7bff9e-8c2a-478e-a30e-55d5be1df762/setup-container/0.log" Mar 20 16:30:08 crc kubenswrapper[4813]: I0320 16:30:08.191688 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-notifications-server-0_0f7bff9e-8c2a-478e-a30e-55d5be1df762/rabbitmq/0.log" Mar 20 16:30:08 crc kubenswrapper[4813]: I0320 16:30:08.225548 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-notifications-server-0_0f7bff9e-8c2a-478e-a30e-55d5be1df762/setup-container/0.log" Mar 20 16:30:08 crc kubenswrapper[4813]: I0320 16:30:08.401902 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-server-0_ec1149a7-ab74-4cc6-9e54-66a6136d41ac/setup-container/0.log" Mar 20 16:30:08 crc kubenswrapper[4813]: I0320 16:30:08.688702 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-server-0_ec1149a7-ab74-4cc6-9e54-66a6136d41ac/setup-container/0.log" Mar 20 16:30:08 crc kubenswrapper[4813]: I0320 16:30:08.796544 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-server-0_ec1149a7-ab74-4cc6-9e54-66a6136d41ac/rabbitmq/0.log" Mar 20 16:30:09 crc kubenswrapper[4813]: I0320 16:30:09.276842 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="330a3487-131e-4ff0-884b-cf8df2e6dc66" path="/var/lib/kubelet/pods/330a3487-131e-4ff0-884b-cf8df2e6dc66/volumes" Mar 20 16:30:15 crc kubenswrapper[4813]: I0320 16:30:15.917345 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_memcached-0_c4df32c5-10f9-4047-9224-c7ecb941a55f/memcached/0.log" Mar 20 16:30:20 crc kubenswrapper[4813]: I0320 16:30:20.933422 4813 scope.go:117] "RemoveContainer" containerID="b61af43d9c34e9ac555ff5288ea02486e89f3716b7f991e871ce5fa3ea5ef108" Mar 20 16:30:20 crc kubenswrapper[4813]: I0320 16:30:20.976754 4813 scope.go:117] "RemoveContainer" containerID="59b3434a9e2f0d241decbd570fa4d9d692da6602c00fbb2ea2b93faeb7d50162" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.026725 4813 scope.go:117] "RemoveContainer" containerID="7446e286e9891874d6503968b437f1847f285b7e0be45a3938f1a95f83a9d9eb" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.077956 4813 scope.go:117] "RemoveContainer" containerID="007f35ee459cf81da479c6c75adcb3296ba4907325104e4f50aae28d4d8fb198" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.092929 4813 scope.go:117] "RemoveContainer" containerID="7cceb3b630aa7f2df354e65c2c9161a1ef91016618af00665a7da30e2f6daf89" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.131005 4813 scope.go:117] "RemoveContainer" 
containerID="b0c39487064ad77ca862706fc1321d278bf4ca772b90c758ef7ff70504911895" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.192111 4813 scope.go:117] "RemoveContainer" containerID="173500e822aba777df32f7a3c87bf912fdaa4febed3dd8b89dcd22d9353d9cc8" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.209107 4813 scope.go:117] "RemoveContainer" containerID="bd16f13546fd49ec42684779e2a3637de7a6745e5bf1e050fcfd7c6b47b163c9" Mar 20 16:30:21 crc kubenswrapper[4813]: I0320 16:30:21.229848 4813 scope.go:117] "RemoveContainer" containerID="9c0c5ff090824f8ae74ef669507e524af6e8716bb1f25827cd1ec60ee89070ff" Mar 20 16:30:27 crc kubenswrapper[4813]: I0320 16:30:27.568334 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/util/0.log" Mar 20 16:30:27 crc kubenswrapper[4813]: I0320 16:30:27.816885 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/pull/0.log" Mar 20 16:30:27 crc kubenswrapper[4813]: I0320 16:30:27.850552 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/pull/0.log" Mar 20 16:30:27 crc kubenswrapper[4813]: I0320 16:30:27.854499 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/util/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.023346 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/util/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.025605 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/extract/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.030021 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde874crv8c_0259c0a9-34c2-4000-982b-ae1122f345fc/pull/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.174331 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/util/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.344176 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/pull/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.376172 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/util/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.380094 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/pull/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 
16:30:28.709379 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/util/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.808190 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/extract/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.810678 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c167lkk_60e1c3ce-b711-4bac-afd3-60804a46154f/pull/0.log" Mar 20 16:30:28 crc kubenswrapper[4813]: I0320 16:30:28.926154 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/util/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.106328 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/pull/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.118409 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/pull/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.145636 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/util/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.321256 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/util/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.379910 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/extract/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.421786 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5vx5n9_665d8eab-08d7-4b69-b20c-75c8151bb3b5/pull/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.610414 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/util/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.745446 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/util/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.793310 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/pull/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.797258 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/pull/0.log" Mar 20 16:30:29 crc kubenswrapper[4813]: I0320 16:30:29.961704 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/util/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.001252 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/pull/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.031356 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_93d662022be5376a0ed3676a120a68427f47e4653a19a985adf923972657c7m_ee83deb1-a281-4eb3-9ba2-7212ae713ae6/extract/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.187506 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/extract-utilities/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.380700 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/extract-content/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.403680 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/extract-utilities/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.429658 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/extract-content/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.635835 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/extract-utilities/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.661657 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/extract-content/0.log" Mar 20 16:30:30 crc kubenswrapper[4813]: I0320 16:30:30.850024 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/extract-utilities/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.069319 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/extract-content/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.121840 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/extract-content/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.164431 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/extract-utilities/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.258749 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bfbr2_e0d2a4c2-86dc-48d5-90e0-3fcc872046b6/registry-server/0.log" Mar 20 16:30:31 
crc kubenswrapper[4813]: I0320 16:30:31.360957 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/extract-utilities/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.384597 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/extract-content/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.586099 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-qrqgm_d700a123-9a6b-4d44-89b2-73e09fe026b3/marketplace-operator/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.736137 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/extract-utilities/0.log" Mar 20 16:30:31 crc kubenswrapper[4813]: I0320 16:30:31.961295 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bwvk2_72d4af38-f89a-48ae-bd87-96c7d479310e/registry-server/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.157840 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/extract-utilities/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.170088 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/extract-content/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.212062 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/extract-content/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.313129 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/extract-content/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.315278 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/extract-utilities/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.512957 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-22jh6_d857b935-7153-4ddb-bec2-b55fff235cd4/registry-server/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.540696 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/extract-utilities/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.655859 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/extract-utilities/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.746877 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/extract-content/0.log" Mar 20 16:30:32 crc kubenswrapper[4813]: I0320 16:30:32.766261 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/extract-content/0.log" Mar 20 16:30:32 crc 
kubenswrapper[4813]: I0320 16:30:32.995414 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/extract-utilities/0.log" Mar 20 16:30:33 crc kubenswrapper[4813]: I0320 16:30:33.042458 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/extract-content/0.log" Mar 20 16:30:33 crc kubenswrapper[4813]: I0320 16:30:33.390756 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vpvn4_77c0d32a-5038-4f35-9faa-8e9cf832b213/registry-server/0.log" Mar 20 16:30:33 crc kubenswrapper[4813]: I0320 16:30:33.842430 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:30:33 crc kubenswrapper[4813]: I0320 16:30:33.842514 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:30:47 crc kubenswrapper[4813]: I0320 16:30:47.411816 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b7d587c88-7c8zn_fb3c3550-7cd9-4286-8ab3-e554c159b357/prometheus-operator-admission-webhook/0.log" Mar 20 16:30:47 crc kubenswrapper[4813]: I0320 16:30:47.443866 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-8ff7d675-zbjnw_13a0d7fe-7887-4ea4-ae5b-b47d8689373b/prometheus-operator/0.log" Mar 20 16:30:47 crc kubenswrapper[4813]: I0320 16:30:47.498445 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b7d587c88-vcgbb_4ac554a5-8e12-415d-845c-72c909a7d1d2/prometheus-operator-admission-webhook/0.log" Mar 20 16:30:47 crc kubenswrapper[4813]: I0320 16:30:47.626197 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-6dd7dd855f-bvk5s_9cbfb8c9-4eb6-4382-b8b1-572029001cc0/operator/0.log" Mar 20 16:30:47 crc kubenswrapper[4813]: I0320 16:30:47.710668 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-ui-dashboards-7f87b9b85b-6t9mn_6c22deaf-5054-462f-bc80-f21fedd71c41/observability-ui-dashboards/0.log" Mar 20 16:30:47 crc kubenswrapper[4813]: I0320 16:30:47.736290 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-b9bc87685-dshn8_95615498-4862-4d52-9c91-6f2f4f17d4ba/perses-operator/0.log" Mar 20 16:31:03 crc kubenswrapper[4813]: I0320 16:31:03.843031 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:31:03 crc kubenswrapper[4813]: I0320 16:31:03.843541 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" 
podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:31:21 crc kubenswrapper[4813]: I0320 16:31:21.385374 4813 scope.go:117] "RemoveContainer" containerID="bcb9356ba88adf134d5a8943252d6b298172f6ebad76e905a9061bf8f3ad3893" Mar 20 16:31:21 crc kubenswrapper[4813]: I0320 16:31:21.408237 4813 scope.go:117] "RemoveContainer" containerID="736aa627dc10915297739f0789cb78486e4aabe6f68f487c06818c92b330b2ba" Mar 20 16:31:21 crc kubenswrapper[4813]: I0320 16:31:21.437772 4813 scope.go:117] "RemoveContainer" containerID="40dee85b2b152162fb750d649c9eece5b5b6a852f2f04d9dcbc4d965f5914548" Mar 20 16:31:21 crc kubenswrapper[4813]: I0320 16:31:21.460065 4813 scope.go:117] "RemoveContainer" containerID="3f2ed583b0f209368d294c88f746f04cdfd9b951b9cfe211bbffe99978cbfa8d" Mar 20 16:31:21 crc kubenswrapper[4813]: I0320 16:31:21.475368 4813 scope.go:117] "RemoveContainer" containerID="eb2f3499d5713e3013e2ce1fd2dba305e1edfc34b4689c8c5854d53b04d0b9db" Mar 20 16:31:33 crc kubenswrapper[4813]: I0320 16:31:33.842601 4813 patch_prober.go:28] interesting pod/machine-config-daemon-l8d6t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 16:31:33 crc kubenswrapper[4813]: I0320 16:31:33.843176 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 16:31:33 crc kubenswrapper[4813]: I0320 16:31:33.843224 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" Mar 20 16:31:33 crc kubenswrapper[4813]: I0320 16:31:33.843951 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146"} pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 16:31:33 crc kubenswrapper[4813]: I0320 16:31:33.843996 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" containerName="machine-config-daemon" containerID="cri-o://61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" gracePeriod=600 Mar 20 16:31:33 crc kubenswrapper[4813]: E0320 16:31:33.984151 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:31:34 crc kubenswrapper[4813]: I0320 16:31:34.637270 4813 generic.go:334] "Generic (PLEG): container finished" podID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" 
containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" exitCode=0 Mar 20 16:31:34 crc kubenswrapper[4813]: I0320 16:31:34.637307 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" event={"ID":"dbc04883-b38a-4b6a-bee4-f6804c8aad94","Type":"ContainerDied","Data":"61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146"} Mar 20 16:31:34 crc kubenswrapper[4813]: I0320 16:31:34.637375 4813 scope.go:117] "RemoveContainer" containerID="8ae1b78d7363e1a932bd9007ead346614378cdac238d4e7161c7374b208edff8" Mar 20 16:31:34 crc kubenswrapper[4813]: I0320 16:31:34.638082 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:31:34 crc kubenswrapper[4813]: E0320 16:31:34.638362 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:31:50 crc kubenswrapper[4813]: I0320 16:31:50.265983 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:31:50 crc kubenswrapper[4813]: E0320 16:31:50.266735 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:31:59 crc kubenswrapper[4813]: I0320 16:31:59.380584 4813 generic.go:334] "Generic (PLEG): container finished" podID="83b66982-c445-4603-9068-896e7c713661" containerID="9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c" exitCode=0 Mar 20 16:31:59 crc kubenswrapper[4813]: I0320 16:31:59.380654 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9gf9c/must-gather-jf28n" event={"ID":"83b66982-c445-4603-9068-896e7c713661","Type":"ContainerDied","Data":"9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c"} Mar 20 16:31:59 crc kubenswrapper[4813]: I0320 16:31:59.381765 4813 scope.go:117] "RemoveContainer" containerID="9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c" Mar 20 16:31:59 crc kubenswrapper[4813]: I0320 16:31:59.573444 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9gf9c_must-gather-jf28n_83b66982-c445-4603-9068-896e7c713661/gather/0.log" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.171952 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567072-j9lds"] Mar 20 16:32:00 crc kubenswrapper[4813]: E0320 16:32:00.172300 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7964d290-dc58-43ae-bbf5-461ce864849c" containerName="collect-profiles" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.172320 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7964d290-dc58-43ae-bbf5-461ce864849c" containerName="collect-profiles" Mar 20 16:32:00 crc kubenswrapper[4813]: E0320 
16:32:00.172345 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c98d88-c20b-4d75-b340-5ca0096c9a0f" containerName="oc" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.172351 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c98d88-c20b-4d75-b340-5ca0096c9a0f" containerName="oc" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.172522 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c98d88-c20b-4d75-b340-5ca0096c9a0f" containerName="oc" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.172542 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7964d290-dc58-43ae-bbf5-461ce864849c" containerName="collect-profiles" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.173139 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.175472 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.175699 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.181817 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.183586 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567072-j9lds"] Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.373408 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd5w7\" (UniqueName: \"kubernetes.io/projected/b9dfcdbd-594f-46c7-96f5-9ee6decc42d7-kube-api-access-pd5w7\") pod \"auto-csr-approver-29567072-j9lds\" (UID: \"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7\") " pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.474311 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd5w7\" (UniqueName: \"kubernetes.io/projected/b9dfcdbd-594f-46c7-96f5-9ee6decc42d7-kube-api-access-pd5w7\") pod \"auto-csr-approver-29567072-j9lds\" (UID: \"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7\") " pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.495859 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd5w7\" (UniqueName: \"kubernetes.io/projected/b9dfcdbd-594f-46c7-96f5-9ee6decc42d7-kube-api-access-pd5w7\") pod \"auto-csr-approver-29567072-j9lds\" (UID: \"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7\") " pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:00 crc kubenswrapper[4813]: I0320 16:32:00.789437 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:01 crc kubenswrapper[4813]: I0320 16:32:01.236184 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567072-j9lds"] Mar 20 16:32:01 crc kubenswrapper[4813]: I0320 16:32:01.247442 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 16:32:01 crc kubenswrapper[4813]: I0320 16:32:01.271055 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:32:01 crc kubenswrapper[4813]: E0320 16:32:01.271292 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:32:01 crc kubenswrapper[4813]: I0320 16:32:01.396607 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567072-j9lds" event={"ID":"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7","Type":"ContainerStarted","Data":"77b697c17ed728af41b63b0b5a53c89d0cfd0477b92dd9252d41cab5d7ebb08b"} Mar 20 16:32:03 crc kubenswrapper[4813]: I0320 16:32:03.417069 4813 generic.go:334] "Generic (PLEG): container finished" podID="b9dfcdbd-594f-46c7-96f5-9ee6decc42d7" containerID="fe6d1e802abedbd8e04c38aa56879ae7787ece85102fe98ea49651d24c752d5c" exitCode=0 Mar 20 16:32:03 crc kubenswrapper[4813]: I0320 16:32:03.417126 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567072-j9lds" event={"ID":"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7","Type":"ContainerDied","Data":"fe6d1e802abedbd8e04c38aa56879ae7787ece85102fe98ea49651d24c752d5c"} Mar 20 16:32:04 crc kubenswrapper[4813]: I0320 16:32:04.702092 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:04 crc kubenswrapper[4813]: I0320 16:32:04.856525 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd5w7\" (UniqueName: \"kubernetes.io/projected/b9dfcdbd-594f-46c7-96f5-9ee6decc42d7-kube-api-access-pd5w7\") pod \"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7\" (UID: \"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7\") " Mar 20 16:32:04 crc kubenswrapper[4813]: I0320 16:32:04.862764 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9dfcdbd-594f-46c7-96f5-9ee6decc42d7-kube-api-access-pd5w7" (OuterVolumeSpecName: "kube-api-access-pd5w7") pod "b9dfcdbd-594f-46c7-96f5-9ee6decc42d7" (UID: "b9dfcdbd-594f-46c7-96f5-9ee6decc42d7"). InnerVolumeSpecName "kube-api-access-pd5w7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:32:04 crc kubenswrapper[4813]: I0320 16:32:04.958894 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd5w7\" (UniqueName: \"kubernetes.io/projected/b9dfcdbd-594f-46c7-96f5-9ee6decc42d7-kube-api-access-pd5w7\") on node \"crc\" DevicePath \"\"" Mar 20 16:32:05 crc kubenswrapper[4813]: I0320 16:32:05.440975 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567072-j9lds" event={"ID":"b9dfcdbd-594f-46c7-96f5-9ee6decc42d7","Type":"ContainerDied","Data":"77b697c17ed728af41b63b0b5a53c89d0cfd0477b92dd9252d41cab5d7ebb08b"} Mar 20 16:32:05 crc kubenswrapper[4813]: I0320 16:32:05.441020 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77b697c17ed728af41b63b0b5a53c89d0cfd0477b92dd9252d41cab5d7ebb08b" Mar 20 16:32:05 crc kubenswrapper[4813]: I0320 16:32:05.441079 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567072-j9lds" Mar 20 16:32:05 crc kubenswrapper[4813]: I0320 16:32:05.769097 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567066-c9q56"] Mar 20 16:32:05 crc kubenswrapper[4813]: I0320 16:32:05.774920 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567066-c9q56"] Mar 20 16:32:07 crc kubenswrapper[4813]: I0320 16:32:07.275629 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85e8755f-951d-4604-8d18-b2c7c6d17d27" path="/var/lib/kubelet/pods/85e8755f-951d-4604-8d18-b2c7c6d17d27/volumes" Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.373991 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9gf9c/must-gather-jf28n"] Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.374328 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9gf9c/must-gather-jf28n"] Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.374529 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9gf9c/must-gather-jf28n" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="copy" containerID="cri-o://15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5" gracePeriod=2 Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.803759 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9gf9c_must-gather-jf28n_83b66982-c445-4603-9068-896e7c713661/copy/0.log" Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.804328 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.917745 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bjnl\" (UniqueName: \"kubernetes.io/projected/83b66982-c445-4603-9068-896e7c713661-kube-api-access-6bjnl\") pod \"83b66982-c445-4603-9068-896e7c713661\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.917902 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/83b66982-c445-4603-9068-896e7c713661-must-gather-output\") pod \"83b66982-c445-4603-9068-896e7c713661\" (UID: \"83b66982-c445-4603-9068-896e7c713661\") " Mar 20 16:32:08 crc kubenswrapper[4813]: I0320 16:32:08.941717 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b66982-c445-4603-9068-896e7c713661-kube-api-access-6bjnl" (OuterVolumeSpecName: "kube-api-access-6bjnl") pod "83b66982-c445-4603-9068-896e7c713661" (UID: "83b66982-c445-4603-9068-896e7c713661"). InnerVolumeSpecName "kube-api-access-6bjnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.022591 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bjnl\" (UniqueName: \"kubernetes.io/projected/83b66982-c445-4603-9068-896e7c713661-kube-api-access-6bjnl\") on node \"crc\" DevicePath \"\"" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.029233 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83b66982-c445-4603-9068-896e7c713661-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "83b66982-c445-4603-9068-896e7c713661" (UID: "83b66982-c445-4603-9068-896e7c713661"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.124397 4813 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/83b66982-c445-4603-9068-896e7c713661-must-gather-output\") on node \"crc\" DevicePath \"\"" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.276766 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83b66982-c445-4603-9068-896e7c713661" path="/var/lib/kubelet/pods/83b66982-c445-4603-9068-896e7c713661/volumes" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.476625 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9gf9c_must-gather-jf28n_83b66982-c445-4603-9068-896e7c713661/copy/0.log" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.478217 4813 generic.go:334] "Generic (PLEG): container finished" podID="83b66982-c445-4603-9068-896e7c713661" containerID="15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5" exitCode=143 Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.478273 4813 scope.go:117] "RemoveContainer" containerID="15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.478355 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9gf9c/must-gather-jf28n" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.502274 4813 scope.go:117] "RemoveContainer" containerID="9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.581169 4813 scope.go:117] "RemoveContainer" containerID="15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5" Mar 20 16:32:09 crc kubenswrapper[4813]: E0320 16:32:09.582055 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5\": container with ID starting with 15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5 not found: ID does not exist" containerID="15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.582098 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5"} err="failed to get container status \"15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5\": rpc error: code = NotFound desc = could not find container \"15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5\": container with ID starting with 15deeedfa106a6506f8d76d033d49d828cf8304ded5fe72751c1fe8cbe1e1be5 not found: ID does not exist" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.582123 4813 scope.go:117] "RemoveContainer" containerID="9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c" Mar 20 16:32:09 crc kubenswrapper[4813]: E0320 16:32:09.582766 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c\": container with ID starting with 9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c not found: ID does not exist" containerID="9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c" Mar 20 16:32:09 crc kubenswrapper[4813]: I0320 16:32:09.582807 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c"} err="failed to get container status \"9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c\": rpc error: code = NotFound desc = could not find container \"9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c\": container with ID starting with 9ca5ce6f1b1126577f51bf679ab6134a25d531efb06166c94b4dbbe77f60253c not found: ID does not exist" Mar 20 16:32:13 crc kubenswrapper[4813]: I0320 16:32:13.266078 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:32:13 crc kubenswrapper[4813]: E0320 16:32:13.267186 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:32:21 crc kubenswrapper[4813]: I0320 16:32:21.523283 4813 scope.go:117] "RemoveContainer" 
containerID="c308ecfe017e239b7cfa0ce0162057e65ba4b3592d72c1cfde47e8d034d68b33" Mar 20 16:32:21 crc kubenswrapper[4813]: I0320 16:32:21.547104 4813 scope.go:117] "RemoveContainer" containerID="ca6e68e6dbfbcda8642182fc54f06affe2753a944d7af62beb9276127bffc0b9" Mar 20 16:32:21 crc kubenswrapper[4813]: I0320 16:32:21.576421 4813 scope.go:117] "RemoveContainer" containerID="0084f4780ab0fbfeb9661a7b5d49a726f5dee1a99d5205505567f372733eb033" Mar 20 16:32:21 crc kubenswrapper[4813]: I0320 16:32:21.664976 4813 scope.go:117] "RemoveContainer" containerID="c5cae7a847026d6443dc26b913ce20a2843559592d4de93755c16eca648542fd" Mar 20 16:32:21 crc kubenswrapper[4813]: I0320 16:32:21.763804 4813 scope.go:117] "RemoveContainer" containerID="ac32a9cf8c62423ca93032c45df6701187d840b979dc61345472f90c0eb11d0a" Mar 20 16:32:21 crc kubenswrapper[4813]: I0320 16:32:21.815639 4813 scope.go:117] "RemoveContainer" containerID="60ad7fe9d32aae62c8bd0e22facefe1fb91f0420aec5bef4f48af187bb0e4d76" Mar 20 16:32:24 crc kubenswrapper[4813]: I0320 16:32:24.266007 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:32:24 crc kubenswrapper[4813]: E0320 16:32:24.266515 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:32:36 crc kubenswrapper[4813]: I0320 16:32:36.266089 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:32:36 crc kubenswrapper[4813]: E0320 16:32:36.266697 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:32:49 crc kubenswrapper[4813]: I0320 16:32:49.266323 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:32:49 crc kubenswrapper[4813]: E0320 16:32:49.267103 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.801666 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tnsrs"] Mar 20 16:32:54 crc kubenswrapper[4813]: E0320 16:32:54.802447 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9dfcdbd-594f-46c7-96f5-9ee6decc42d7" containerName="oc" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.802458 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9dfcdbd-594f-46c7-96f5-9ee6decc42d7" containerName="oc" Mar 20 16:32:54 crc 
kubenswrapper[4813]: E0320 16:32:54.802472 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="gather" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.802492 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="gather" Mar 20 16:32:54 crc kubenswrapper[4813]: E0320 16:32:54.802510 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="copy" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.802516 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="copy" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.802650 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9dfcdbd-594f-46c7-96f5-9ee6decc42d7" containerName="oc" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.802666 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="gather" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.802675 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b66982-c445-4603-9068-896e7c713661" containerName="copy" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.803759 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.818434 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnsrs"] Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.886360 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-utilities\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.886429 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8gdn\" (UniqueName: \"kubernetes.io/projected/0970d845-6d61-49c8-aa86-f056826a95ef-kube-api-access-k8gdn\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.886556 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-catalog-content\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.987416 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-catalog-content\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.987585 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-utilities\") pod 
\"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.987649 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8gdn\" (UniqueName: \"kubernetes.io/projected/0970d845-6d61-49c8-aa86-f056826a95ef-kube-api-access-k8gdn\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.987848 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-catalog-content\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:54 crc kubenswrapper[4813]: I0320 16:32:54.987926 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-utilities\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:55 crc kubenswrapper[4813]: I0320 16:32:55.016999 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8gdn\" (UniqueName: \"kubernetes.io/projected/0970d845-6d61-49c8-aa86-f056826a95ef-kube-api-access-k8gdn\") pod \"redhat-operators-tnsrs\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:55 crc kubenswrapper[4813]: I0320 16:32:55.126035 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:32:55 crc kubenswrapper[4813]: I0320 16:32:55.589973 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnsrs"] Mar 20 16:32:55 crc kubenswrapper[4813]: I0320 16:32:55.848063 4813 generic.go:334] "Generic (PLEG): container finished" podID="0970d845-6d61-49c8-aa86-f056826a95ef" containerID="ce563fd1d06d837c9a58ffc3bbff796b783a913abb12e5221ac3d41a140b5b8a" exitCode=0 Mar 20 16:32:55 crc kubenswrapper[4813]: I0320 16:32:55.848115 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerDied","Data":"ce563fd1d06d837c9a58ffc3bbff796b783a913abb12e5221ac3d41a140b5b8a"} Mar 20 16:32:55 crc kubenswrapper[4813]: I0320 16:32:55.849404 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerStarted","Data":"73b5cde05ba9948d775e97d9f1e9929a353098e438543a0ad7e27be0aaa2b981"} Mar 20 16:32:56 crc kubenswrapper[4813]: I0320 16:32:56.862250 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerStarted","Data":"48c65e7ce656f4ad673bac3ea8941ba7cb4240ff3a223ebc9143de26bf9352dc"} Mar 20 16:32:57 crc kubenswrapper[4813]: I0320 16:32:57.871739 4813 generic.go:334] "Generic (PLEG): container finished" podID="0970d845-6d61-49c8-aa86-f056826a95ef" containerID="48c65e7ce656f4ad673bac3ea8941ba7cb4240ff3a223ebc9143de26bf9352dc" exitCode=0 Mar 20 16:32:57 crc kubenswrapper[4813]: I0320 16:32:57.871781 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerDied","Data":"48c65e7ce656f4ad673bac3ea8941ba7cb4240ff3a223ebc9143de26bf9352dc"} Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.203780 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x5ztf"] Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.208365 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.238239 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-utilities\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.238347 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-catalog-content\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.238404 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7448\" (UniqueName: \"kubernetes.io/projected/2c68606b-1bed-426f-8fd1-f3928544bb79-kube-api-access-f7448\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.241166 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5ztf"] Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.340534 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7448\" (UniqueName: \"kubernetes.io/projected/2c68606b-1bed-426f-8fd1-f3928544bb79-kube-api-access-f7448\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.341023 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-utilities\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.341105 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-catalog-content\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.341680 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-catalog-content\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.342508 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-utilities\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.369702 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f7448\" (UniqueName: \"kubernetes.io/projected/2c68606b-1bed-426f-8fd1-f3928544bb79-kube-api-access-f7448\") pod \"certified-operators-x5ztf\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.525404 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.880820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerStarted","Data":"eae0088f15552bb49b169a142d1b8c2ebc92d5297c9cd34cdad80de440fafbed"} Mar 20 16:32:58 crc kubenswrapper[4813]: I0320 16:32:58.904529 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tnsrs" podStartSLOduration=2.49242355 podStartE2EDuration="4.904506821s" podCreationTimestamp="2026-03-20 16:32:54 +0000 UTC" firstStartedPulling="2026-03-20 16:32:55.849804746 +0000 UTC m=+3305.272507587" lastFinishedPulling="2026-03-20 16:32:58.261888007 +0000 UTC m=+3307.684590858" observedRunningTime="2026-03-20 16:32:58.897236356 +0000 UTC m=+3308.319939197" watchObservedRunningTime="2026-03-20 16:32:58.904506821 +0000 UTC m=+3308.327209662" Mar 20 16:32:59 crc kubenswrapper[4813]: W0320 16:32:59.052157 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c68606b_1bed_426f_8fd1_f3928544bb79.slice/crio-5099b23b312ed40782e411c17f5dd120ee56bf6a628e978389d7304ce65769f6 WatchSource:0}: Error finding container 5099b23b312ed40782e411c17f5dd120ee56bf6a628e978389d7304ce65769f6: Status 404 returned error can't find the container with id 5099b23b312ed40782e411c17f5dd120ee56bf6a628e978389d7304ce65769f6 Mar 20 16:32:59 crc kubenswrapper[4813]: I0320 16:32:59.052232 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5ztf"] Mar 20 16:32:59 crc kubenswrapper[4813]: I0320 16:32:59.891452 4813 generic.go:334] "Generic (PLEG): container finished" podID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerID="5eb851043dae1196dec0d48996079bd7115f3926a2cce4ce6fb569ac6925c4d2" exitCode=0 Mar 20 16:32:59 crc kubenswrapper[4813]: I0320 16:32:59.891533 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerDied","Data":"5eb851043dae1196dec0d48996079bd7115f3926a2cce4ce6fb569ac6925c4d2"} Mar 20 16:32:59 crc kubenswrapper[4813]: I0320 16:32:59.891848 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerStarted","Data":"5099b23b312ed40782e411c17f5dd120ee56bf6a628e978389d7304ce65769f6"} Mar 20 16:33:00 crc kubenswrapper[4813]: I0320 16:33:00.902106 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerStarted","Data":"12e54e4ebbafdc544e6d605ed11f664635ef85d6f0a760e14d9407a859ad3cd3"} Mar 20 16:33:01 crc kubenswrapper[4813]: I0320 16:33:01.911253 4813 generic.go:334] "Generic (PLEG): container finished" podID="2c68606b-1bed-426f-8fd1-f3928544bb79" 
containerID="12e54e4ebbafdc544e6d605ed11f664635ef85d6f0a760e14d9407a859ad3cd3" exitCode=0 Mar 20 16:33:01 crc kubenswrapper[4813]: I0320 16:33:01.911304 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerDied","Data":"12e54e4ebbafdc544e6d605ed11f664635ef85d6f0a760e14d9407a859ad3cd3"} Mar 20 16:33:02 crc kubenswrapper[4813]: I0320 16:33:02.922390 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerStarted","Data":"59341b7a09db7f2b011941391f254af25130246bf7a1e6848866124267b0e381"} Mar 20 16:33:02 crc kubenswrapper[4813]: I0320 16:33:02.955882 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x5ztf" podStartSLOduration=2.524290474 podStartE2EDuration="4.955851668s" podCreationTimestamp="2026-03-20 16:32:58 +0000 UTC" firstStartedPulling="2026-03-20 16:32:59.892878292 +0000 UTC m=+3309.315581133" lastFinishedPulling="2026-03-20 16:33:02.324439476 +0000 UTC m=+3311.747142327" observedRunningTime="2026-03-20 16:33:02.944175024 +0000 UTC m=+3312.366877865" watchObservedRunningTime="2026-03-20 16:33:02.955851668 +0000 UTC m=+3312.378554529" Mar 20 16:33:03 crc kubenswrapper[4813]: I0320 16:33:03.266216 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:33:03 crc kubenswrapper[4813]: E0320 16:33:03.266783 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:33:05 crc kubenswrapper[4813]: I0320 16:33:05.126923 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:33:05 crc kubenswrapper[4813]: I0320 16:33:05.126992 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:33:05 crc kubenswrapper[4813]: I0320 16:33:05.192921 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:33:05 crc kubenswrapper[4813]: I0320 16:33:05.997671 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:33:07 crc kubenswrapper[4813]: I0320 16:33:07.796508 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnsrs"] Mar 20 16:33:07 crc kubenswrapper[4813]: I0320 16:33:07.971228 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tnsrs" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="registry-server" containerID="cri-o://eae0088f15552bb49b169a142d1b8c2ebc92d5297c9cd34cdad80de440fafbed" gracePeriod=2 Mar 20 16:33:08 crc kubenswrapper[4813]: I0320 16:33:08.526566 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:33:08 crc 
kubenswrapper[4813]: I0320 16:33:08.527589 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:33:08 crc kubenswrapper[4813]: I0320 16:33:08.571875 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:33:09 crc kubenswrapper[4813]: I0320 16:33:09.088037 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:33:09 crc kubenswrapper[4813]: I0320 16:33:09.992428 4813 generic.go:334] "Generic (PLEG): container finished" podID="0970d845-6d61-49c8-aa86-f056826a95ef" containerID="eae0088f15552bb49b169a142d1b8c2ebc92d5297c9cd34cdad80de440fafbed" exitCode=0 Mar 20 16:33:09 crc kubenswrapper[4813]: I0320 16:33:09.992518 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerDied","Data":"eae0088f15552bb49b169a142d1b8c2ebc92d5297c9cd34cdad80de440fafbed"} Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.235305 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.430052 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-utilities\") pod \"0970d845-6d61-49c8-aa86-f056826a95ef\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.430251 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8gdn\" (UniqueName: \"kubernetes.io/projected/0970d845-6d61-49c8-aa86-f056826a95ef-kube-api-access-k8gdn\") pod \"0970d845-6d61-49c8-aa86-f056826a95ef\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.430304 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-catalog-content\") pod \"0970d845-6d61-49c8-aa86-f056826a95ef\" (UID: \"0970d845-6d61-49c8-aa86-f056826a95ef\") " Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.430913 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-utilities" (OuterVolumeSpecName: "utilities") pod "0970d845-6d61-49c8-aa86-f056826a95ef" (UID: "0970d845-6d61-49c8-aa86-f056826a95ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.445112 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0970d845-6d61-49c8-aa86-f056826a95ef-kube-api-access-k8gdn" (OuterVolumeSpecName: "kube-api-access-k8gdn") pod "0970d845-6d61-49c8-aa86-f056826a95ef" (UID: "0970d845-6d61-49c8-aa86-f056826a95ef"). InnerVolumeSpecName "kube-api-access-k8gdn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.531784 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.531822 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8gdn\" (UniqueName: \"kubernetes.io/projected/0970d845-6d61-49c8-aa86-f056826a95ef-kube-api-access-k8gdn\") on node \"crc\" DevicePath \"\"" Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.565271 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0970d845-6d61-49c8-aa86-f056826a95ef" (UID: "0970d845-6d61-49c8-aa86-f056826a95ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:33:10 crc kubenswrapper[4813]: I0320 16:33:10.632982 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0970d845-6d61-49c8-aa86-f056826a95ef-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.005876 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnsrs" event={"ID":"0970d845-6d61-49c8-aa86-f056826a95ef","Type":"ContainerDied","Data":"73b5cde05ba9948d775e97d9f1e9929a353098e438543a0ad7e27be0aaa2b981"} Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.005928 4813 scope.go:117] "RemoveContainer" containerID="eae0088f15552bb49b169a142d1b8c2ebc92d5297c9cd34cdad80de440fafbed" Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.005977 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnsrs" Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.029478 4813 scope.go:117] "RemoveContainer" containerID="48c65e7ce656f4ad673bac3ea8941ba7cb4240ff3a223ebc9143de26bf9352dc" Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.046594 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnsrs"] Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.055204 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tnsrs"] Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.059770 4813 scope.go:117] "RemoveContainer" containerID="ce563fd1d06d837c9a58ffc3bbff796b783a913abb12e5221ac3d41a140b5b8a" Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.222400 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5ztf"] Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.223167 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x5ztf" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="registry-server" containerID="cri-o://59341b7a09db7f2b011941391f254af25130246bf7a1e6848866124267b0e381" gracePeriod=2 Mar 20 16:33:11 crc kubenswrapper[4813]: I0320 16:33:11.276207 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" path="/var/lib/kubelet/pods/0970d845-6d61-49c8-aa86-f056826a95ef/volumes" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.020227 4813 generic.go:334] "Generic (PLEG): container finished" podID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerID="59341b7a09db7f2b011941391f254af25130246bf7a1e6848866124267b0e381" exitCode=0 Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.020263 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerDied","Data":"59341b7a09db7f2b011941391f254af25130246bf7a1e6848866124267b0e381"} Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.340258 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.466763 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-utilities\") pod \"2c68606b-1bed-426f-8fd1-f3928544bb79\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.466943 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7448\" (UniqueName: \"kubernetes.io/projected/2c68606b-1bed-426f-8fd1-f3928544bb79-kube-api-access-f7448\") pod \"2c68606b-1bed-426f-8fd1-f3928544bb79\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.467070 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-catalog-content\") pod \"2c68606b-1bed-426f-8fd1-f3928544bb79\" (UID: \"2c68606b-1bed-426f-8fd1-f3928544bb79\") " Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.467902 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-utilities" (OuterVolumeSpecName: "utilities") pod "2c68606b-1bed-426f-8fd1-f3928544bb79" (UID: "2c68606b-1bed-426f-8fd1-f3928544bb79"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.475422 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c68606b-1bed-426f-8fd1-f3928544bb79-kube-api-access-f7448" (OuterVolumeSpecName: "kube-api-access-f7448") pod "2c68606b-1bed-426f-8fd1-f3928544bb79" (UID: "2c68606b-1bed-426f-8fd1-f3928544bb79"). InnerVolumeSpecName "kube-api-access-f7448". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.568670 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.568696 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7448\" (UniqueName: \"kubernetes.io/projected/2c68606b-1bed-426f-8fd1-f3928544bb79-kube-api-access-f7448\") on node \"crc\" DevicePath \"\"" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.863468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c68606b-1bed-426f-8fd1-f3928544bb79" (UID: "2c68606b-1bed-426f-8fd1-f3928544bb79"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 16:33:12 crc kubenswrapper[4813]: I0320 16:33:12.872952 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c68606b-1bed-426f-8fd1-f3928544bb79-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.028632 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ztf" event={"ID":"2c68606b-1bed-426f-8fd1-f3928544bb79","Type":"ContainerDied","Data":"5099b23b312ed40782e411c17f5dd120ee56bf6a628e978389d7304ce65769f6"} Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.028688 4813 scope.go:117] "RemoveContainer" containerID="59341b7a09db7f2b011941391f254af25130246bf7a1e6848866124267b0e381" Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.029067 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5ztf" Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.058929 4813 scope.go:117] "RemoveContainer" containerID="12e54e4ebbafdc544e6d605ed11f664635ef85d6f0a760e14d9407a859ad3cd3" Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.070195 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5ztf"] Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.080403 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x5ztf"] Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.107681 4813 scope.go:117] "RemoveContainer" containerID="5eb851043dae1196dec0d48996079bd7115f3926a2cce4ce6fb569ac6925c4d2" Mar 20 16:33:13 crc kubenswrapper[4813]: I0320 16:33:13.275820 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" path="/var/lib/kubelet/pods/2c68606b-1bed-426f-8fd1-f3928544bb79/volumes" Mar 20 16:33:16 crc kubenswrapper[4813]: I0320 16:33:16.265938 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:33:16 crc kubenswrapper[4813]: E0320 16:33:16.266639 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:33:28 crc kubenswrapper[4813]: I0320 16:33:28.266702 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:33:28 crc kubenswrapper[4813]: E0320 16:33:28.267668 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:33:41 crc kubenswrapper[4813]: I0320 16:33:41.271592 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:33:41 crc kubenswrapper[4813]: E0320 
16:33:41.272650 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:33:55 crc kubenswrapper[4813]: I0320 16:33:55.265861 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:33:55 crc kubenswrapper[4813]: E0320 16:33:55.266710 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.145603 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567074-v5csb"] Mar 20 16:34:00 crc kubenswrapper[4813]: E0320 16:34:00.146209 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="extract-utilities" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146221 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="extract-utilities" Mar 20 16:34:00 crc kubenswrapper[4813]: E0320 16:34:00.146236 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="extract-content" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146243 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="extract-content" Mar 20 16:34:00 crc kubenswrapper[4813]: E0320 16:34:00.146256 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="extract-content" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146262 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="extract-content" Mar 20 16:34:00 crc kubenswrapper[4813]: E0320 16:34:00.146272 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="extract-utilities" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146278 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="extract-utilities" Mar 20 16:34:00 crc kubenswrapper[4813]: E0320 16:34:00.146289 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="registry-server" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146295 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="registry-server" Mar 20 16:34:00 crc kubenswrapper[4813]: E0320 16:34:00.146312 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="registry-server" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146317 4813 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="registry-server" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146468 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0970d845-6d61-49c8-aa86-f056826a95ef" containerName="registry-server" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.146535 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c68606b-1bed-426f-8fd1-f3928544bb79" containerName="registry-server" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.147054 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.149918 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.149938 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.151129 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.159461 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567074-v5csb"] Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.242620 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd6b6\" (UniqueName: \"kubernetes.io/projected/109e87d3-e486-464f-80aa-3e97f4b8b72f-kube-api-access-sd6b6\") pod \"auto-csr-approver-29567074-v5csb\" (UID: \"109e87d3-e486-464f-80aa-3e97f4b8b72f\") " pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.343892 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd6b6\" (UniqueName: \"kubernetes.io/projected/109e87d3-e486-464f-80aa-3e97f4b8b72f-kube-api-access-sd6b6\") pod \"auto-csr-approver-29567074-v5csb\" (UID: \"109e87d3-e486-464f-80aa-3e97f4b8b72f\") " pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.374752 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd6b6\" (UniqueName: \"kubernetes.io/projected/109e87d3-e486-464f-80aa-3e97f4b8b72f-kube-api-access-sd6b6\") pod \"auto-csr-approver-29567074-v5csb\" (UID: \"109e87d3-e486-464f-80aa-3e97f4b8b72f\") " pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.467116 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:00 crc kubenswrapper[4813]: I0320 16:34:00.954208 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567074-v5csb"] Mar 20 16:34:01 crc kubenswrapper[4813]: I0320 16:34:01.443919 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567074-v5csb" event={"ID":"109e87d3-e486-464f-80aa-3e97f4b8b72f","Type":"ContainerStarted","Data":"69c4d80f6644413f043f6517953fcca4c9270317c8c657fdaa2c74638698b193"} Mar 20 16:34:02 crc kubenswrapper[4813]: I0320 16:34:02.453156 4813 generic.go:334] "Generic (PLEG): container finished" podID="109e87d3-e486-464f-80aa-3e97f4b8b72f" containerID="2d231b83a1464d76690df31cf0db5e34b80862599a6ff4543bfd106bcad73e34" exitCode=0 Mar 20 16:34:02 crc kubenswrapper[4813]: I0320 16:34:02.453223 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567074-v5csb" event={"ID":"109e87d3-e486-464f-80aa-3e97f4b8b72f","Type":"ContainerDied","Data":"2d231b83a1464d76690df31cf0db5e34b80862599a6ff4543bfd106bcad73e34"} Mar 20 16:34:03 crc kubenswrapper[4813]: I0320 16:34:03.829211 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.006774 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd6b6\" (UniqueName: \"kubernetes.io/projected/109e87d3-e486-464f-80aa-3e97f4b8b72f-kube-api-access-sd6b6\") pod \"109e87d3-e486-464f-80aa-3e97f4b8b72f\" (UID: \"109e87d3-e486-464f-80aa-3e97f4b8b72f\") " Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.011441 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/109e87d3-e486-464f-80aa-3e97f4b8b72f-kube-api-access-sd6b6" (OuterVolumeSpecName: "kube-api-access-sd6b6") pod "109e87d3-e486-464f-80aa-3e97f4b8b72f" (UID: "109e87d3-e486-464f-80aa-3e97f4b8b72f"). InnerVolumeSpecName "kube-api-access-sd6b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.108996 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd6b6\" (UniqueName: \"kubernetes.io/projected/109e87d3-e486-464f-80aa-3e97f4b8b72f-kube-api-access-sd6b6\") on node \"crc\" DevicePath \"\"" Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.478352 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567074-v5csb" event={"ID":"109e87d3-e486-464f-80aa-3e97f4b8b72f","Type":"ContainerDied","Data":"69c4d80f6644413f043f6517953fcca4c9270317c8c657fdaa2c74638698b193"} Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.478447 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69c4d80f6644413f043f6517953fcca4c9270317c8c657fdaa2c74638698b193" Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.478581 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567074-v5csb" Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.909299 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567068-plqw2"] Mar 20 16:34:04 crc kubenswrapper[4813]: I0320 16:34:04.915699 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567068-plqw2"] Mar 20 16:34:05 crc kubenswrapper[4813]: I0320 16:34:05.285310 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4537163d-0b02-40b2-ad77-ae3ec4022504" path="/var/lib/kubelet/pods/4537163d-0b02-40b2-ad77-ae3ec4022504/volumes" Mar 20 16:34:10 crc kubenswrapper[4813]: I0320 16:34:10.266556 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:34:10 crc kubenswrapper[4813]: E0320 16:34:10.267127 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:34:21 crc kubenswrapper[4813]: I0320 16:34:21.984534 4813 scope.go:117] "RemoveContainer" containerID="19815ea197757157f0a9184b054ad7dc1eaf28b005e2eaa469c746cf2b1343db" Mar 20 16:34:25 crc kubenswrapper[4813]: I0320 16:34:25.266061 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:34:25 crc kubenswrapper[4813]: E0320 16:34:25.266636 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:34:36 crc kubenswrapper[4813]: I0320 16:34:36.265447 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:34:36 crc kubenswrapper[4813]: E0320 16:34:36.266956 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:34:49 crc kubenswrapper[4813]: I0320 16:34:49.266590 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:34:49 crc kubenswrapper[4813]: E0320 16:34:49.267288 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 
16:35:01 crc kubenswrapper[4813]: I0320 16:35:01.278095 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:35:01 crc kubenswrapper[4813]: E0320 16:35:01.279626 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:35:13 crc kubenswrapper[4813]: I0320 16:35:13.266231 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:35:13 crc kubenswrapper[4813]: E0320 16:35:13.267204 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:35:24 crc kubenswrapper[4813]: I0320 16:35:24.266224 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:35:24 crc kubenswrapper[4813]: E0320 16:35:24.267015 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:35:38 crc kubenswrapper[4813]: I0320 16:35:38.266053 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:35:38 crc kubenswrapper[4813]: E0320 16:35:38.266993 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:35:53 crc kubenswrapper[4813]: I0320 16:35:53.266991 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:35:53 crc kubenswrapper[4813]: E0320 16:35:53.270586 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.143889 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29567076-xbzbf"] Mar 20 16:36:00 crc 
kubenswrapper[4813]: E0320 16:36:00.144692 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="109e87d3-e486-464f-80aa-3e97f4b8b72f" containerName="oc" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.144704 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="109e87d3-e486-464f-80aa-3e97f4b8b72f" containerName="oc" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.144897 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="109e87d3-e486-464f-80aa-3e97f4b8b72f" containerName="oc" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.145409 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.147828 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.147973 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-pbkkd" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.148201 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.166358 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567076-xbzbf"] Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.328090 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld6t5\" (UniqueName: \"kubernetes.io/projected/eb0122dc-6ca1-4477-b737-8f01490fad17-kube-api-access-ld6t5\") pod \"auto-csr-approver-29567076-xbzbf\" (UID: \"eb0122dc-6ca1-4477-b737-8f01490fad17\") " pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.430222 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld6t5\" (UniqueName: \"kubernetes.io/projected/eb0122dc-6ca1-4477-b737-8f01490fad17-kube-api-access-ld6t5\") pod \"auto-csr-approver-29567076-xbzbf\" (UID: \"eb0122dc-6ca1-4477-b737-8f01490fad17\") " pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.463807 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld6t5\" (UniqueName: \"kubernetes.io/projected/eb0122dc-6ca1-4477-b737-8f01490fad17-kube-api-access-ld6t5\") pod \"auto-csr-approver-29567076-xbzbf\" (UID: \"eb0122dc-6ca1-4477-b737-8f01490fad17\") " pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.466327 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.930410 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29567076-xbzbf"] Mar 20 16:36:00 crc kubenswrapper[4813]: I0320 16:36:00.979089 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" event={"ID":"eb0122dc-6ca1-4477-b737-8f01490fad17","Type":"ContainerStarted","Data":"36913fd35abd01d7e989282c0cdc99aebebf54303a328c38239193f047ef3080"} Mar 20 16:36:02 crc kubenswrapper[4813]: I0320 16:36:02.998271 4813 generic.go:334] "Generic (PLEG): container finished" podID="eb0122dc-6ca1-4477-b737-8f01490fad17" containerID="df5c26b11a8ee12d71bc9d5736f359f9cb599c6de9a10662694610536eac62ab" exitCode=0 Mar 20 16:36:02 crc kubenswrapper[4813]: I0320 16:36:02.998387 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" event={"ID":"eb0122dc-6ca1-4477-b737-8f01490fad17","Type":"ContainerDied","Data":"df5c26b11a8ee12d71bc9d5736f359f9cb599c6de9a10662694610536eac62ab"} Mar 20 16:36:04 crc kubenswrapper[4813]: I0320 16:36:04.376277 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:04 crc kubenswrapper[4813]: I0320 16:36:04.497848 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld6t5\" (UniqueName: \"kubernetes.io/projected/eb0122dc-6ca1-4477-b737-8f01490fad17-kube-api-access-ld6t5\") pod \"eb0122dc-6ca1-4477-b737-8f01490fad17\" (UID: \"eb0122dc-6ca1-4477-b737-8f01490fad17\") " Mar 20 16:36:04 crc kubenswrapper[4813]: I0320 16:36:04.503561 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb0122dc-6ca1-4477-b737-8f01490fad17-kube-api-access-ld6t5" (OuterVolumeSpecName: "kube-api-access-ld6t5") pod "eb0122dc-6ca1-4477-b737-8f01490fad17" (UID: "eb0122dc-6ca1-4477-b737-8f01490fad17"). InnerVolumeSpecName "kube-api-access-ld6t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 16:36:04 crc kubenswrapper[4813]: I0320 16:36:04.601713 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld6t5\" (UniqueName: \"kubernetes.io/projected/eb0122dc-6ca1-4477-b737-8f01490fad17-kube-api-access-ld6t5\") on node \"crc\" DevicePath \"\"" Mar 20 16:36:05 crc kubenswrapper[4813]: I0320 16:36:05.013925 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" event={"ID":"eb0122dc-6ca1-4477-b737-8f01490fad17","Type":"ContainerDied","Data":"36913fd35abd01d7e989282c0cdc99aebebf54303a328c38239193f047ef3080"} Mar 20 16:36:05 crc kubenswrapper[4813]: I0320 16:36:05.014504 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36913fd35abd01d7e989282c0cdc99aebebf54303a328c38239193f047ef3080" Mar 20 16:36:05 crc kubenswrapper[4813]: I0320 16:36:05.013962 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29567076-xbzbf" Mar 20 16:36:05 crc kubenswrapper[4813]: I0320 16:36:05.266155 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:36:05 crc kubenswrapper[4813]: E0320 16:36:05.266415 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:36:05 crc kubenswrapper[4813]: I0320 16:36:05.440534 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29567070-h4fjk"] Mar 20 16:36:05 crc kubenswrapper[4813]: I0320 16:36:05.446897 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29567070-h4fjk"] Mar 20 16:36:07 crc kubenswrapper[4813]: I0320 16:36:07.275829 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10c98d88-c20b-4d75-b340-5ca0096c9a0f" path="/var/lib/kubelet/pods/10c98d88-c20b-4d75-b340-5ca0096c9a0f/volumes" Mar 20 16:36:18 crc kubenswrapper[4813]: I0320 16:36:18.265852 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:36:18 crc kubenswrapper[4813]: E0320 16:36:18.266551 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" Mar 20 16:36:22 crc kubenswrapper[4813]: I0320 16:36:22.082812 4813 scope.go:117] "RemoveContainer" containerID="5050f20a0aef5ab08890cba57b8184a8887f4a3a4ba501e266a49ca322da012f" Mar 20 16:36:32 crc kubenswrapper[4813]: I0320 16:36:32.266341 4813 scope.go:117] "RemoveContainer" containerID="61b9394cd6f5e99382f346d0de791573ce43c9d428e35f3fd3cd379c45966146" Mar 20 16:36:32 crc kubenswrapper[4813]: E0320 16:36:32.267160 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-l8d6t_openshift-machine-config-operator(dbc04883-b38a-4b6a-bee4-f6804c8aad94)\"" pod="openshift-machine-config-operator/machine-config-daemon-l8d6t" podUID="dbc04883-b38a-4b6a-bee4-f6804c8aad94" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515157273645024464 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015157273646017402 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015157264431016515 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015157264431015465 5ustar corecore